language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/internal/WrapVisitor.java
|
{
"start": 1207,
"end": 6078
}
|
class ____ extends ProxyVisitor {
protected Object entity;
protected Object id;
private boolean substitute;
public WrapVisitor(Object entity, Object id, EventSource session) {
super( session );
this.entity = entity;
this.id = id;
}
public boolean isSubstitutionRequired() {
return substitute;
}
@Override
protected Object processCollection(Object collection, CollectionType collectionType)
throws HibernateException {
if ( collection == null || collection == LazyPropertyInitializer.UNFETCHED_PROPERTY ) {
return null;
}
else if ( collection instanceof PersistentCollection<?> persistentCollection ) {
if ( persistentCollection.setCurrentSession( getSession() ) ) {
reattachCollection( persistentCollection, collectionType );
}
return null;
}
else {
return processArrayOrNewCollection( collection, collectionType );
}
}
final Object processArrayOrNewCollection(Object collection, CollectionType collectionType)
throws HibernateException {
if ( collection == null ) {
//do nothing
return null;
}
else {
final var session = getSession();
final var persister =
session.getFactory().getMappingMetamodel()
.getCollectionDescriptor( collectionType.getRole() );
final var persistenceContext = session.getPersistenceContextInternal();
//TODO: move into collection type, so we can use polymorphism!
if ( collectionType.hasHolder() ) {
if ( collection != CollectionType.UNFETCHED_COLLECTION
&& persistenceContext.getCollectionHolder( collection ) == null ) {
final var collectionHolder = collectionType.wrap( session, collection );
persistenceContext.addNewCollection( persister, collectionHolder );
persistenceContext.addCollectionHolder( collectionHolder );
}
return null;
}
else {
if ( isPersistentAttributeInterceptable( entity ) ) {
final var attributeInterceptor =
asPersistentAttributeInterceptable( entity ).$$_hibernate_getInterceptor();
if ( attributeInterceptor instanceof EnhancementAsProxyLazinessInterceptor ) {
return null;
}
else if ( attributeInterceptor instanceof LazyAttributeLoadingInterceptor lazyLoadingInterceptor ) {
if ( lazyLoadingInterceptor.isAttributeLoaded( persister.getAttributeMapping().getAttributeName() ) ) {
final var entry = persistenceContext.getEntry( entity );
if ( entry.isExistsInDatabase() ) {
final Object key = getCollectionKey( persister, entity, entry, session );
if ( key != null ) {
var collectionInstance =
persistenceContext.getCollection( new CollectionKey( persister, key ) );
if ( collectionInstance == null ) {
// the collection has not been initialized and new collection values have been assigned,
// we need to be sure to delete all the collection elements before inserting the new ones
collectionInstance =
persister.getCollectionSemantics()
.instantiateWrapper( key, persister, session );
persistenceContext.addUninitializedCollection( persister, collectionInstance, key );
persistenceContext.getCollectionEntry( collectionInstance ).setDoremove( true );
}
}
}
}
}
}
final var persistentCollection = collectionType.wrap( session, collection );
persistenceContext.addNewCollection( persister, persistentCollection );
if ( EVENT_LISTENER_LOGGER.isTraceEnabled() ) {
EVENT_LISTENER_LOGGER.wrappedCollectionInRole( collectionType.getRole() );
}
return persistentCollection; //Force a substitution!
}
}
}
@Override
protected void processValue(int i, Object[] values, Type[] types) {
final Object result = processValue( values[i], types[i] );
if ( result != null ) {
substitute = true;
values[i] = result;
}
}
@Override
protected Object processComponent(Object component, CompositeType componentType) throws HibernateException {
if ( component != null ) {
final Object[] values = componentType.getPropertyValues( component, getSession() );
final Type[] types = componentType.getSubtypes();
boolean substituteComponent = false;
for ( int i = 0; i < types.length; i++ ) {
final Object result = processValue( values[i], types[i] );
if ( result != null ) {
values[i] = result;
substituteComponent = true;
}
}
if ( substituteComponent ) {
componentType.setPropertyValues( component, values );
}
}
return null;
}
@Override
public void process(Object object, EntityPersister persister) throws HibernateException {
final Object[] values = persister.getValues( object );
final Type[] types = persister.getPropertyTypes();
processEntityPropertyValues( values, types );
if ( isSubstitutionRequired() ) {
persister.setValues( object, values );
}
}
}
|
WrapVisitor
|
java
|
processing__processing4
|
app/src/processing/app/syntax/InputHandler.java
|
{
"start": 25608,
"end": 26773
}
|
class ____ implements ActionListener
{
private boolean select;
public next_word(boolean select)
{
this.select = select;
}
public void actionPerformed(ActionEvent evt)
{
JEditTextArea textArea = getTextArea(evt);
int caret = textArea.getCaretPosition();
int line = textArea.getCaretLine();
int lineStart = textArea.getLineStartOffset(line);
caret -= lineStart;
String lineText = textArea.getLineText(textArea
.getCaretLine());
if (caret == lineText.length())
{
if (lineStart + caret == textArea.getDocumentLength())
{
textArea.getToolkit().beep();
return;
}
caret++;
}
else
{
String noWordSep = (String)textArea.getDocument().getProperty("noWordSep");
caret = findWordEnd(lineText,caret,noWordSep);
}
if (select)
textArea.select(textArea.getMarkPosition(),
lineStart + caret);
else
textArea.setCaretPosition(lineStart + caret);
}
}
public static
|
next_word
|
java
|
apache__camel
|
components/camel-undertow/src/test/java/org/apache/camel/component/undertow/rest/RestUndertowHttpMultipleVerbsTest.java
|
{
"start": 1091,
"end": 2417
}
|
class ____ extends BaseUndertowTest {
@Test
public void testProducerGetPut() throws Exception {
getMockEndpoint("mock:get").expectedMessageCount(1);
getMockEndpoint("mock:put").expectedMessageCount(0);
template.requestBodyAndHeader("undertow:http://localhost:{{port}}/example/123", null, Exchange.HTTP_METHOD, "GET");
MockEndpoint.assertIsSatisfied(context);
MockEndpoint.resetMocks(context);
getMockEndpoint("mock:get").expectedMessageCount(0);
getMockEndpoint("mock:put").expectedMessageCount(1);
template.requestBodyAndHeader("undertow:http://localhost:{{port}}/example/456", "Hello World", Exchange.HTTP_METHOD,
"PUT");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// configure to use undertow on localhost with the given port
restConfiguration().component("undertow").host("localhost").port(getPort());
rest("/example")
.get("{pathParamHere}").to("mock:get")
.put("{pathParamHere}").to("mock:put");
}
};
}
}
|
RestUndertowHttpMultipleVerbsTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ReferenceEqualityTest.java
|
{
"start": 14553,
"end": 15347
}
|
class ____ {
private static final Comparator<String> comparator1 =
(o1, o2) -> {
if (o1 == o2) {
return 0;
} else if (o1 == null) {
return -1;
} else if (o2 == null) {
return 1;
} else {
return -1;
}
};
private static final Comparator<String> comparator2 =
(o1, o2) -> {
return o1 == o2 ? 0 : -1;
};
private static final Comparator<String> comparator3 = (o1, o2) -> o1 == o2 ? 0 : -1;
}
""")
.expectNoDiagnostics()
.doTest();
}
}
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionTests.java
|
{
"start": 391,
"end": 6944
}
|
class ____ extends ESTestCase {
public void testStringCtorOrderingSemver() {
assertTrue(new Version("1").compareTo(new Version("1.0")) < 0);
assertTrue(new Version("1.0").compareTo(new Version("1.0.0.0.0.0.0.0.0.1")) < 0);
assertTrue(new Version("1.0.0").compareTo(new Version("1.0.0.0.0.0.0.0.0.1")) < 0);
assertTrue(new Version("1.0.0").compareTo(new Version("2.0.0")) < 0);
assertTrue(new Version("2.0.0").compareTo(new Version("11.0.0")) < 0);
assertTrue(new Version("2.0.0").compareTo(new Version("2.1.0")) < 0);
assertTrue(new Version("2.1.0").compareTo(new Version("2.1.1")) < 0);
assertTrue(new Version("2.1.1").compareTo(new Version("2.1.1.0")) < 0);
assertTrue(new Version("2.0.0").compareTo(new Version("11.0.0")) < 0);
assertTrue(new Version("1.0.0").compareTo(new Version("2.0")) < 0);
assertTrue(new Version("1.0.0-a").compareTo(new Version("1.0.0-b")) < 0);
assertTrue(new Version("1.0.0-1.0.0").compareTo(new Version("1.0.0-2.0")) < 0);
assertTrue(new Version("1.0.0-alpha").compareTo(new Version("1.0.0-alpha.1")) < 0);
assertTrue(new Version("1.0.0-alpha.1").compareTo(new Version("1.0.0-alpha.beta")) < 0);
assertTrue(new Version("1.0.0-alpha.beta").compareTo(new Version("1.0.0-beta")) < 0);
assertTrue(new Version("1.0.0-beta").compareTo(new Version("1.0.0-beta.2")) < 0);
assertTrue(new Version("1.0.0-beta.2").compareTo(new Version("1.0.0-beta.11")) < 0);
assertTrue(new Version("1.0.0-beta11").compareTo(new Version("1.0.0-beta2")) < 0); // correct according to Semver specs
assertTrue(new Version("1.0.0-beta.11").compareTo(new Version("1.0.0-rc.1")) < 0);
assertTrue(new Version("1.0.0-rc.1").compareTo(new Version("1.0.0")) < 0);
assertTrue(new Version("1.0.0").compareTo(new Version("2.0.0-pre127")) < 0);
assertTrue(new Version("2.0.0-pre127").compareTo(new Version("2.0.0-pre128")) < 0);
assertTrue(new Version("2.0.0-pre128").compareTo(new Version("2.0.0-pre128-somethingelse")) < 0);
assertTrue(new Version("2.0.0-pre20201231z110026").compareTo(new Version("2.0.0-pre227")) < 0);
// invalid versions sort after valid ones
assertTrue(new Version("99999.99999.99999").compareTo(new Version("1.invalid")) < 0);
assertTrue(new Version("").compareTo(new Version("a")) < 0);
}
public void testBytesRefCtorOrderingSemver() {
assertTrue(new Version(encodeVersion("1")).compareTo(new Version(encodeVersion("1.0"))) < 0);
assertTrue(new Version(encodeVersion("1.0")).compareTo(new Version(encodeVersion("1.0.0.0.0.0.0.0.0.1"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0")).compareTo(new Version(encodeVersion("1.0.0.0.0.0.0.0.0.1"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0")).compareTo(new Version(encodeVersion("2.0.0"))) < 0);
assertTrue(new Version(encodeVersion("2.0.0")).compareTo(new Version(encodeVersion("11.0.0"))) < 0);
assertTrue(new Version(encodeVersion("2.0.0")).compareTo(new Version(encodeVersion("2.1.0"))) < 0);
assertTrue(new Version(encodeVersion("2.1.0")).compareTo(new Version(encodeVersion("2.1.1"))) < 0);
assertTrue(new Version(encodeVersion("2.1.1")).compareTo(new Version(encodeVersion("2.1.1.0"))) < 0);
assertTrue(new Version(encodeVersion("2.0.0")).compareTo(new Version(encodeVersion("11.0.0"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0")).compareTo(new Version(encodeVersion("2.0"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-a")).compareTo(new Version(encodeVersion("1.0.0-b"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-1.0.0")).compareTo(new Version(encodeVersion("1.0.0-2.0"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-alpha")).compareTo(new Version(encodeVersion("1.0.0-alpha.1"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-alpha.1")).compareTo(new Version(encodeVersion("1.0.0-alpha.beta"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-alpha.beta")).compareTo(new Version(encodeVersion("1.0.0-beta"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-beta")).compareTo(new Version(encodeVersion("1.0.0-beta.2"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-beta.2")).compareTo(new Version(encodeVersion("1.0.0-beta.11"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-beta11")).compareTo(new Version(encodeVersion("1.0.0-beta2"))) < 0); // correct
// according to
// Semver specs
assertTrue(new Version(encodeVersion("1.0.0-beta.11")).compareTo(new Version(encodeVersion("1.0.0-rc.1"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0-rc.1")).compareTo(new Version(encodeVersion("1.0.0"))) < 0);
assertTrue(new Version(encodeVersion("1.0.0")).compareTo(new Version(encodeVersion("2.0.0-pre127"))) < 0);
assertTrue(new Version(encodeVersion("2.0.0-pre127")).compareTo(new Version(encodeVersion("2.0.0-pre128"))) < 0);
assertTrue(new Version(encodeVersion("2.0.0-pre128")).compareTo(new Version(encodeVersion("2.0.0-pre128-somethingelse"))) < 0);
assertTrue(new Version(encodeVersion("2.0.0-pre20201231z110026")).compareTo(new Version(encodeVersion("2.0.0-pre227"))) < 0);
// invalid versions sort after valid ones
assertTrue(new Version(encodeVersion("99999.99999.99999")).compareTo(new Version(encodeVersion("1.invalid"))) < 0);
assertTrue(new Version(encodeVersion("")).compareTo(new Version(encodeVersion("a"))) < 0);
}
public void testConstructorsComparison() {
assertTrue(new Version(encodeVersion("1")).compareTo(new Version("1")) == 0);
assertTrue(new Version(encodeVersion("1.2.3")).compareTo(new Version("1.2.3")) == 0);
assertTrue(new Version(encodeVersion("1.2.3-rc1")).compareTo(new Version("1.2.3-rc1")) == 0);
assertTrue(new Version(encodeVersion("lkjlaskdjf")).compareTo(new Version("lkjlaskdjf")) == 0);
assertTrue(new Version(encodeVersion("99999.99999.99999")).compareTo(new Version("99999.99999.99999")) == 0);
}
private static BytesRef encodeVersion(String version) {
return VersionEncoder.encodeVersion(version).bytesRef;
}
}
|
VersionTests
|
java
|
apache__flink
|
flink-test-utils-parent/flink-test-utils-junit/src/test/java/org/apache/flink/testutils/junit/RetryOnFailureTest.java
|
{
"start": 1076,
"end": 2444
}
|
class ____ {
@Rule public RetryRule retryRule = new RetryRule();
private static final int NUMBER_OF_RUNS = 5;
private static int numberOfFailedRuns;
private static int numberOfSuccessfulRuns;
private static boolean firstRun = true;
@AfterClass
public static void verify() throws Exception {
assertThat(numberOfFailedRuns).isEqualTo(NUMBER_OF_RUNS + 1);
assertThat(numberOfSuccessfulRuns).isEqualTo(3);
}
@Test
@RetryOnFailure(times = NUMBER_OF_RUNS)
public void testRetryOnFailure() throws Exception {
// All but the (expected) last run should be successful
if (numberOfFailedRuns < NUMBER_OF_RUNS) {
numberOfFailedRuns++;
throw new RuntimeException("Expected test exception");
} else {
numberOfSuccessfulRuns++;
}
}
@Test
@RetryOnFailure(times = NUMBER_OF_RUNS)
public void testRetryOnceOnFailure() throws Exception {
if (firstRun) {
numberOfFailedRuns++;
firstRun = false;
throw new RuntimeException("Expected test exception");
} else {
numberOfSuccessfulRuns++;
}
}
@Test
@RetryOnFailure(times = NUMBER_OF_RUNS)
public void testDontRetryOnSuccess() throws Exception {
numberOfSuccessfulRuns++;
}
}
|
RetryOnFailureTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/SchemaManagementProvider.java
|
{
"start": 867,
"end": 1174
}
|
interface ____ {
/**
* Return the {@link SchemaManagement} for the specified {@link DataSource}.
* @param dataSource the dataSource to handle
* @return the {@link SchemaManagement} for the {@link DataSource}.
*/
SchemaManagement getSchemaManagement(DataSource dataSource);
}
|
SchemaManagementProvider
|
java
|
micronaut-projects__micronaut-core
|
inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/MapOfListsWithAutomaticUnwrapping.java
|
{
"start": 271,
"end": 372
}
|
class ____ {
private Map<String, List<@MyMin OptionalInt>> map;
}
|
MapOfListsWithAutomaticUnwrapping
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/instrument/classloading/ReflectiveLoadTimeWeaverTests.java
|
{
"start": 3147,
"end": 3498
}
|
class ____ extends ClassLoader {
private int numTimesAddTransformerCalled = 0;
public int getNumTimesGetThrowawayClassLoaderCalled() {
return this.numTimesAddTransformerCalled;
}
public void addTransformer(ClassFileTransformer transformer) {
++this.numTimesAddTransformerCalled;
}
}
public static final
|
JustAddTransformerClassLoader
|
java
|
apache__camel
|
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFtpRemoteFileFilterIT.java
|
{
"start": 1302,
"end": 2800
}
|
class ____ extends FtpServerTestSupport {
@BindToRegistry("myFilter")
private final MyFileFilter<Object> filter = new MyFileFilter<>();
private String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}/filefilter?password=admin&filter=#myFilter";
}
@Override
public void doPostSetup() throws Exception {
prepareFtpServer();
}
// Skip testing on AIX as it have an issue with this test with the file
// filter
@DisabledOnOs(OS.AIX)
@Test
public void testFtpFilter() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(2);
mock.expectedBodiesReceivedInAnyOrder("Report 1", "Report 2");
mock.assertIsSatisfied();
}
private void prepareFtpServer() {
// prepares the FTP Server by creating files on the server that we want
// to unit
// test that we can pool
sendFile(getFtpUrl(), "Hello World", "hello.txt");
sendFile(getFtpUrl(), "Report 1", "report1.txt");
sendFile(getFtpUrl(), "Bye World", "bye.txt");
sendFile(getFtpUrl(), "Report 2", "report2.txt");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(getFtpUrl()).convertBodyTo(String.class).to("mock:result");
}
};
}
// START SNIPPET: e1
public static
|
FromFtpRemoteFileFilterIT
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/TreeRangeMap.java
|
{
"start": 12941,
"end": 17819
}
|
class ____ implements RangeMap<K, V> {
private final Range<K> subRange;
SubRangeMap(Range<K> subRange) {
this.subRange = subRange;
}
@Override
public @Nullable V get(K key) {
return subRange.contains(key) ? TreeRangeMap.this.get(key) : null;
}
@Override
public @Nullable Entry<Range<K>, V> getEntry(K key) {
if (subRange.contains(key)) {
Entry<Range<K>, V> entry = TreeRangeMap.this.getEntry(key);
if (entry != null) {
return immutableEntry(entry.getKey().intersection(subRange), entry.getValue());
}
}
return null;
}
@Override
public Range<K> span() {
Cut<K> lowerBound;
Entry<Cut<K>, RangeMapEntry<K, V>> lowerEntry =
entriesByLowerBound.floorEntry(subRange.lowerBound);
if (lowerEntry != null
&& lowerEntry.getValue().getUpperBound().compareTo(subRange.lowerBound) > 0) {
lowerBound = subRange.lowerBound;
} else {
lowerBound = entriesByLowerBound.ceilingKey(subRange.lowerBound);
if (lowerBound == null || lowerBound.compareTo(subRange.upperBound) >= 0) {
throw new NoSuchElementException();
}
}
Cut<K> upperBound;
Entry<Cut<K>, RangeMapEntry<K, V>> upperEntry =
entriesByLowerBound.lowerEntry(subRange.upperBound);
if (upperEntry == null) {
throw new NoSuchElementException();
} else if (upperEntry.getValue().getUpperBound().compareTo(subRange.upperBound) >= 0) {
upperBound = subRange.upperBound;
} else {
upperBound = upperEntry.getValue().getUpperBound();
}
return Range.create(lowerBound, upperBound);
}
@Override
public void put(Range<K> range, V value) {
checkArgument(
subRange.encloses(range), "Cannot put range %s into a subRangeMap(%s)", range, subRange);
TreeRangeMap.this.put(range, value);
}
@Override
public void putCoalescing(Range<K> range, V value) {
if (entriesByLowerBound.isEmpty() || !subRange.encloses(range)) {
put(range, value);
return;
}
Range<K> coalescedRange = coalescedRange(range, checkNotNull(value));
// only coalesce ranges within the subRange
put(coalescedRange.intersection(subRange), value);
}
@Override
public void putAll(RangeMap<K, ? extends V> rangeMap) {
if (rangeMap.asMapOfRanges().isEmpty()) {
return;
}
Range<K> span = rangeMap.span();
checkArgument(
subRange.encloses(span),
"Cannot putAll rangeMap with span %s into a subRangeMap(%s)",
span,
subRange);
TreeRangeMap.this.putAll(rangeMap);
}
@Override
public void clear() {
TreeRangeMap.this.remove(subRange);
}
@Override
public void remove(Range<K> range) {
if (range.isConnected(subRange)) {
TreeRangeMap.this.remove(range.intersection(subRange));
}
}
@Override
public RangeMap<K, V> subRangeMap(Range<K> range) {
if (!range.isConnected(subRange)) {
return emptySubRangeMap();
} else {
return TreeRangeMap.this.subRangeMap(range.intersection(subRange));
}
}
@Override
public Map<Range<K>, V> asMapOfRanges() {
return new SubRangeMapAsMap();
}
@Override
public Map<Range<K>, V> asDescendingMapOfRanges() {
return new SubRangeMapAsMap() {
@Override
Iterator<Entry<Range<K>, V>> entryIterator() {
if (subRange.isEmpty()) {
return emptyIterator();
}
Iterator<RangeMapEntry<K, V>> backingItr =
entriesByLowerBound
.headMap(subRange.upperBound, false)
.descendingMap()
.values()
.iterator();
return new AbstractIterator<Entry<Range<K>, V>>() {
@Override
protected @Nullable Entry<Range<K>, V> computeNext() {
if (backingItr.hasNext()) {
RangeMapEntry<K, V> entry = backingItr.next();
if (entry.getUpperBound().compareTo(subRange.lowerBound) <= 0) {
return endOfData();
}
return immutableEntry(entry.getKey().intersection(subRange), entry.getValue());
}
return endOfData();
}
};
}
};
}
@Override
public boolean equals(@Nullable Object o) {
if (o instanceof RangeMap) {
RangeMap<?, ?> rangeMap = (RangeMap<?, ?>) o;
return asMapOfRanges().equals(rangeMap.asMapOfRanges());
}
return false;
}
@Override
public int hashCode() {
return asMapOfRanges().hashCode();
}
@Override
public String toString() {
return asMapOfRanges().toString();
}
|
SubRangeMap
|
java
|
dropwizard__dropwizard
|
dropwizard-health/src/main/java/io/dropwizard/health/response/HealthResponderFactory.java
|
{
"start": 628,
"end": 1637
}
|
interface ____ extends Discoverable {
/**
* Configures a health responder for responding to health check requests (e.g. from load balancer).
*
* @param name The name of the application.
* @param healthCheckUrlPaths The paths to expose a health check on.
* @param healthResponseProvider A provider of responses to respond to requests with.
* @param health The health environment.
* @param jersey The Jersey environment.
* @param servlets The servlet environment.
* @param mapper A Jackson object mapper to allow writing JSON responses (if needed).
*/
void configure(final String name, final Collection<String> healthCheckUrlPaths,
final HealthResponseProvider healthResponseProvider, final HealthEnvironment health,
final JerseyEnvironment jersey, final ServletEnvironment servlets, final ObjectMapper mapper);
}
|
HealthResponderFactory
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/JsonSerdeTestUtil.java
|
{
"start": 1989,
"end": 6350
}
|
class ____ {
private JsonSerdeTestUtil() {
// no instantiation
}
static SerdeContext configuredSerdeContext() {
return configuredSerdeContext(
CatalogManagerMocks.createEmptyCatalogManager(), TableConfig.getDefault());
}
static SerdeContext configuredSerdeContext(Configuration configuration) {
final TableConfig tableConfig = TableConfig.getDefault();
tableConfig.addConfiguration(configuration);
return configuredSerdeContext(
CatalogManagerMocks.createEmptyCatalogManager(), configuration);
}
static SerdeContext configuredSerdeContext(
CatalogManager catalogManager, Configuration configuration) {
final TableConfig tableConfig = TableConfig.getDefault();
tableConfig.addConfiguration(configuration);
return configuredSerdeContext(catalogManager, tableConfig);
}
static SerdeContext configuredSerdeContext(
CatalogManager catalogManager, TableConfig tableConfig) {
final PlannerContext plannerContext =
PlannerMocks.newBuilder()
.withCatalogManager(catalogManager)
.withTableConfig(tableConfig)
.withRootSchema(
asRootSchema(new CatalogManagerCalciteSchema(catalogManager, true)))
.build()
.getPlannerContext();
return new SerdeContext(
new ParserImpl(null, null, plannerContext::createCalciteParser, null),
plannerContext.getFlinkContext(),
plannerContext.getTypeFactory(),
plannerContext.createFrameworkConfig().getOperatorTable());
}
static String toJson(SerdeContext serdeContext, Object object) throws IOException {
final ObjectWriter objectWriter =
CompiledPlanSerdeUtil.createJsonObjectWriter(serdeContext);
return objectWriter.writeValueAsString(object);
}
static <T> T toObject(SerdeContext serdeContext, String json, Class<T> clazz)
throws IOException {
final ObjectReader objectReader =
CompiledPlanSerdeUtil.createJsonObjectReader(serdeContext);
return objectReader.readValue(json, clazz);
}
static <T> T testJsonRoundTrip(SerdeContext serdeContext, T spec, Class<T> clazz)
throws IOException {
String actualJson = toJson(serdeContext, spec);
T actual = toObject(serdeContext, actualJson, clazz);
assertThat(actual).isEqualTo(spec);
assertThat(actualJson).isEqualTo(toJson(serdeContext, actual));
return actual;
}
static <T> T testJsonRoundTrip(T spec, Class<T> clazz) throws IOException {
return testJsonRoundTrip(configuredSerdeContext(), spec, clazz);
}
static void assertThatJsonContains(JsonNode json, String... path) {
JsonPointer jsonPointer = pathToPointer(path);
assertThat(json)
.asInstanceOf(type(ObjectNode.class))
.as("Serialized json '%s'", json)
.matches(
o -> {
JsonNode node = o.at(jsonPointer);
return !node.isMissingNode() && !node.isNull();
},
String.format("contains at pointer '%s' a not null value", jsonPointer));
}
static void assertThatJsonDoesNotContain(JsonNode json, String... path) {
JsonPointer jsonPointer = pathToPointer(path);
assertThat(json)
.asInstanceOf(type(ObjectNode.class))
.as("Serialized json '%s'", json)
.matches(
o -> {
JsonNode node = o.at(jsonPointer);
return node.isMissingNode() || node.isNull();
},
String.format(
"at pointer '%s' return missing node or null node", jsonPointer));
}
private static JsonPointer pathToPointer(String... path) {
JsonPointer pointer = JsonPointer.empty();
for (String el : path) {
pointer = pointer.append(JsonPointer.compile(JsonPointer.SEPARATOR + el));
}
return pointer;
}
}
|
JsonSerdeTestUtil
|
java
|
quarkusio__quarkus
|
devtools/maven/src/test/java/io/quarkus/maven/TestDependencyTreeMojoTest.java
|
{
"start": 34,
"end": 180
}
|
class ____ extends BasicDependencyTreeTestBase {
@Override
protected String mode() {
return "test";
}
}
|
TestDependencyTreeMojoTest
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/qualifiers/composite/Runner.java
|
{
"start": 751,
"end": 834
}
|
class ____ implements Runnable {
@Override
public void run() {
}
}
|
Runner
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/shuffle/ShuffleMasterContext.java
|
{
"start": 1280,
"end": 1500
}
|
interface ____ {
/**
* @return the cluster configuration.
*/
Configuration getConfiguration();
/** Handles the fatal error if any. */
void onFatalError(Throwable throwable);
}
|
ShuffleMasterContext
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java
|
{
"start": 797,
"end": 3725
}
|
class ____ extends ESTestCase {
static Map<String, IndexFieldCapabilities> randomFieldCaps() {
final Map<String, IndexFieldCapabilities> fieldCaps = new HashMap<>();
final Map<String, String> meta = switch (randomInt(2)) {
case 0 -> Map.of();
case 1 -> Map.of("key", "value");
default -> Map.of("key1", "value1", "key2", "value2");
};
final TimeSeriesParams.MetricType metricType = randomBoolean() ? null : randomFrom(TimeSeriesParams.MetricType.values());
final List<String> fields = randomList(1, 5, () -> randomAlphaOfLength(5));
for (String field : fields) {
final IndexFieldCapabilities fieldCap = new IndexFieldCapabilities(
field,
randomAlphaOfLengthBetween(5, 20),
randomBoolean(),
randomBoolean(),
randomBoolean(),
randomBoolean(),
metricType,
meta
);
fieldCaps.put(field, fieldCap);
}
return fieldCaps;
}
static Map<String, List<String>> randomMappingHashToIndices() {
Map<String, List<String>> mappingHashToIndices = new HashMap<>();
int numGroups = between(0, 10);
for (int g = 0; g < numGroups; g++) {
String mappingHash = "mapping_hash_" + g;
String group = "group_" + g;
List<String> indices = IntStream.range(0, between(1, 10)).mapToObj(n -> group + "_index_" + n).toList();
mappingHashToIndices.put(mappingHash, indices);
}
return mappingHashToIndices;
}
static List<FieldCapabilitiesIndexResponse> randomIndexResponsesWithMappingHash(Map<String, List<String>> mappingHashToIndices) {
final List<FieldCapabilitiesIndexResponse> responses = new ArrayList<>();
for (Map.Entry<String, List<String>> e : mappingHashToIndices.entrySet()) {
Map<String, IndexFieldCapabilities> fieldCaps = randomFieldCaps();
var indexMode = randomFrom(IndexMode.values());
String mappingHash = e.getKey();
for (String index : e.getValue()) {
responses.add(new FieldCapabilitiesIndexResponse(index, mappingHash, fieldCaps, true, indexMode));
}
}
return responses;
}
static List<FieldCapabilitiesIndexResponse> randomIndexResponsesWithoutMappingHash() {
final List<FieldCapabilitiesIndexResponse> responses = new ArrayList<>();
int numIndices = between(0, 10);
for (int i = 0; i < numIndices; i++) {
String index = "index_without_mapping_hash_" + i;
var indexMode = randomFrom(IndexMode.values());
responses.add(new FieldCapabilitiesIndexResponse(index, null, randomFieldCaps(), randomBoolean(), indexMode));
}
return responses;
}
}
|
FieldCapabilitiesIndexResponseTests
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/MapAccessTests.java
|
{
"start": 1275,
"end": 3926
}
|
class ____ extends AbstractExpressionTests {
@Test
void directMapAccess() {
evaluate("testMap.get('monday')", "montag", String.class);
}
@Test
void mapAccessThroughIndexer() {
evaluate("testMap['monday']", "montag", String.class);
}
@Test
void mapAccessThroughIndexerForNonexistentKey() {
evaluate("testMap['bogus']", null, String.class);
}
@Test
void variableMapAccess() {
var parser = new SpelExpressionParser();
var ctx = TestScenarioCreator.getTestEvaluationContext();
ctx.setVariable("day", "saturday");
var expr = parser.parseExpression("testMap[#day]");
assertThat(expr.getValue(ctx, String.class)).isEqualTo("samstag");
}
@Test
void mapAccessOnRoot() {
var map = Map.of("key", "value");
var parser = new SpelExpressionParser();
var expr = parser.parseExpression("#root['key']");
assertThat(expr.getValue(map)).isEqualTo("value");
}
@Test
void mapAccessOnProperty() {
var properties = Map.of("key", "value");
var bean = new TestBean(null, new TestBean(properties, null));
var parser = new SpelExpressionParser();
var expr = parser.parseExpression("nestedBean.properties['key']");
assertThat(expr.getValue(bean)).isEqualTo("value");
}
@Test
void mapAccessor() {
var parser = new SpelExpressionParser();
var ctx = TestScenarioCreator.getTestEvaluationContext();
ctx.addPropertyAccessor(new MapAccessor());
var expr1 = parser.parseExpression("testMap.monday");
assertThat(expr1.getValue(ctx, String.class)).isEqualTo("montag");
var expr2 = parser.parseExpression("testMap.bogus");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expr2.getValue(ctx, String.class))
.satisfies(ex -> assertThat(ex.getMessageCode()).isEqualTo(PROPERTY_OR_FIELD_NOT_READABLE));
}
@Test
void nullAwareMapAccessor() {
var parser = new SpelExpressionParser();
var ctx = TestScenarioCreator.getTestEvaluationContext();
ctx.addPropertyAccessor(new NullAwareMapAccessor());
var expr = parser.parseExpression("testMap.monday");
assertThat(expr.getValue(ctx, String.class)).isEqualTo("montag");
// Unlike MapAccessor, NullAwareMapAccessor returns null for a nonexistent key.
expr = parser.parseExpression("testMap.bogus");
assertThat(expr.getValue(ctx, String.class)).isNull();
}
record TestBean(Map<String, String> properties, TestBean nestedBean) {
}
/**
* In contrast to the standard {@link MapAccessor}, {@code NullAwareMapAccessor}
* reports that it can read any map (ignoring whether the map actually contains
* an entry for the given key) and returns {@code null} for a nonexistent key.
*/
private static
|
MapAccessTests
|
java
|
grpc__grpc-java
|
census/src/main/java/io/grpc/census/internal/DeprecatedCensusConstants.java
|
{
"start": 864,
"end": 979
}
|
class ____ the deprecated OpenCensus constants. */
@SuppressWarnings("deprecation")
@VisibleForTesting
public final
|
for
|
java
|
spring-projects__spring-framework
|
spring-orm/src/test/java/org/springframework/orm/jpa/support/PersistenceInjectionTests.java
|
{
"start": 36172,
"end": 36337
}
|
class ____ {
@PersistenceContext(type = PersistenceContextType.EXTENDED)
private EntityManager em;
}
public static
|
DefaultPrivatePersistenceContextFieldExtended
|
java
|
spring-projects__spring-boot
|
module/spring-boot-quartz/src/main/java/org/springframework/boot/quartz/autoconfigure/JobStoreType.java
|
{
"start": 792,
"end": 910
}
|
enum ____ {
/**
* Store jobs in memory.
*/
MEMORY,
/**
* Store jobs in the database.
*/
JDBC
}
|
JobStoreType
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/classnamecollision/Something.java
|
{
"start": 203,
"end": 319
}
|
class ____ extends org.hibernate.processor.test.classnamecollision.somewhere.Something {
String alphaValue;
}
|
Something
|
java
|
elastic__elasticsearch
|
modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java
|
{
"start": 30929,
"end": 31031
}
|
interface ____ {
String getEnv(String name);
}
@FunctionalInterface
|
SystemEnvironment
|
java
|
elastic__elasticsearch
|
x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java
|
{
"start": 629,
"end": 2351
}
|
class ____ extends ActionResponse implements ToXContentObject {
private final long selfCount;
private final long totalCount;
private final double annualCo2Tons;
private final double annualCostsUsd;
private final List<TopNFunction> topNFunctions;
public GetTopNFunctionsResponse(
long selfCount,
long totalCount,
double annualCo2Tons,
double annualCostsUsd,
List<TopNFunction> topNFunctions
) {
this.selfCount = selfCount;
this.totalCount = totalCount;
this.annualCo2Tons = annualCo2Tons;
this.annualCostsUsd = annualCostsUsd;
this.topNFunctions = topNFunctions;
}
@Override
public void writeTo(StreamOutput out) {
TransportAction.localOnly();
}
public long getSelfCount() {
return selfCount;
}
public long getTotalCount() {
return totalCount;
}
public double getAnnualCo2Tons() {
return annualCo2Tons;
}
public double getAnnualCostsUsd() {
return annualCostsUsd;
}
public List<TopNFunction> getTopN() {
return topNFunctions;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("self_count", selfCount);
builder.field("total_count", totalCount);
builder.field("self_annual_co2_tons").rawValue(NumberUtils.doubleToString(annualCo2Tons));
builder.field("self_annual_cost_usd").rawValue(NumberUtils.doubleToString(annualCostsUsd));
builder.xContentList("topn", topNFunctions);
builder.endObject();
return builder;
}
}
|
GetTopNFunctionsResponse
|
java
|
google__guice
|
core/test/com/google/inject/spi/ElementsTest.java
|
{
"start": 50699,
"end": 50860
}
|
class ____ implements Provider<List<Object>> {
@Override
public List<Object> get() {
return new ArrayList<>();
}
}
private static
|
ListProvider
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-maven-plugin/src/intTest/projects/aot-test/src/test/java/org/test/SampleApplicationTests.java
|
{
"start": 1061,
"end": 1227
}
|
class ____ {
@Autowired
private MyBean myBean;
@Test
void contextLoads() {
assertThat(this.myBean).isNotNull();
}
@Configuration
static
|
SampleApplicationTests
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/test/java/org/apache/log4j/LoggerTest.java
|
{
"start": 20067,
"end": 20521
}
|
class ____ extends AbstractAppender {
private static final long serialVersionUID = 1L;
int counter;
CountingAppender() {
super("Counter", null, null, true, Property.EMPTY_ARRAY);
counter = 0;
}
@Override
public void append(final LogEvent event) {
counter++;
}
public boolean requiresLayout() {
return true;
}
}
}
|
CountingAppender
|
java
|
google__guava
|
guava/src/com/google/common/base/Supplier.java
|
{
"start": 992,
"end": 1064
}
|
class ____ common suppliers and related utilities.
*
* <p>As this
|
provides
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/CachedRecordStore.java
|
{
"start": 1659,
"end": 8511
}
|
class ____<R extends BaseRecord>
extends RecordStore<R> implements StateStoreCache {
private static final Logger LOG =
LoggerFactory.getLogger(CachedRecordStore.class);
/** Prevent loading the cache more than once every 500 ms. */
private static final long MIN_UPDATE_MS = 500;
/** Cached entries. */
private List<R> records = new ArrayList<>();
/** Time stamp of the cached entries. */
private long timestamp = -1;
/** If the cache is initialized. */
private boolean initialized = false;
/** Last time the cache was updated. */
private long lastUpdate = -1;
/** Lock to access the memory cache. */
private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
private final Lock readLock = readWriteLock.readLock();
private final Lock writeLock = readWriteLock.writeLock();
/** If it should override the expired values when loading the cache. */
private boolean override = false;
/**
* Create a new cached record store.
*
* @param clazz Class of the record to store.
* @param driver State Store driver.
*/
protected CachedRecordStore(Class<R> clazz, StateStoreDriver driver) {
this(clazz, driver, false);
}
/**
* Create a new cached record store.
*
* @param clazz Class of the record to store.
* @param driver State Store driver.
* @param over If the entries should be overridden if they expire
*/
protected CachedRecordStore(
Class<R> clazz, StateStoreDriver driver, boolean over) {
super(clazz, driver);
this.override = over;
}
/**
* Check that the cache of the State Store information is available.
*
* @throws StateStoreUnavailableException If the cache is not initialized.
*/
private void checkCacheAvailable() throws StateStoreUnavailableException {
if (!getDriver().isDriverReady() || !this.initialized) {
throw new StateStoreUnavailableException(
"Cached State Store not initialized, " +
getRecordClass().getSimpleName() + " records not valid");
}
}
@Override
public boolean loadCache(boolean force) throws IOException {
// Prevent loading the cache too frequently
if (force || isUpdateTime()) {
List<R> newRecords = null;
long t = -1;
long startTime = Time.monotonicNow();
try {
QueryResult<R> result = getDriver().get(getRecordClass());
newRecords = result.getRecords();
t = result.getTimestamp();
// If we have any expired record, update the State Store
if (this.override) {
overrideExpiredRecords(result);
}
} catch (IOException e) {
LOG.error("Cannot get \"{}\" records from the State Store",
getRecordClass().getSimpleName());
return false;
}
// Update cache atomically
writeLock.lock();
try {
this.records.clear();
this.records.addAll(newRecords);
this.timestamp = t;
this.initialized = true;
} finally {
writeLock.unlock();
}
// Update the metrics for the cache State Store size
StateStoreMetrics metrics = getDriver().getMetrics();
if (metrics != null) {
String recordName = getRecordClass().getSimpleName();
metrics.setCacheLoading(recordName, Time.monotonicNow() - startTime);
metrics.setCacheSize(recordName, this.records.size());
}
lastUpdate = Time.monotonicNow();
}
return true;
}
/**
* Check if it's time to update the cache. Update it was never updated.
*
* @return If it's time to update this cache.
*/
private boolean isUpdateTime() {
return Time.monotonicNow() - lastUpdate > MIN_UPDATE_MS;
}
/**
* Updates the state store with any record overrides we detected, such as an
* expired state. If an expired record exists beyond deletion time, it is
* removed.
*
* @param query RecordQueryResult containing the data to be inspected.
* @throws IOException If the values cannot be updated.
*/
public void overrideExpiredRecords(QueryResult<R> query) throws IOException {
List<R> commitRecords = new ArrayList<>();
List<R> deleteRecords = new ArrayList<>();
List<R> newRecords = query.getRecords();
long currentDriverTime = query.getTimestamp();
if (newRecords == null || currentDriverTime <= 0) {
LOG.error("Cannot check overrides for record");
return;
}
for (R record : newRecords) {
if (record.shouldBeDeleted(currentDriverTime)) {
String recordName = StateStoreUtils.getRecordName(record.getClass());
LOG.info("State Store record to delete {}: {}", recordName, record);
deleteRecords.add(record);
} else if (!record.isExpired() && record.checkExpired(currentDriverTime)) {
String recordName = StateStoreUtils.getRecordName(record.getClass());
LOG.info("Override State Store record {}: {}", recordName, record);
commitRecords.add(record);
}
}
List<R> removedRecords = getDriver().handleOverwriteAndDelete(commitRecords, deleteRecords);
// In driver async mode, driver will return null and skip the next block.
// newRecords might be stale as a result but will sort itself out the next override cycle.
if (removedRecords != null && !removedRecords.isEmpty()) {
newRecords.removeAll(removedRecords);
}
}
/**
* Updates the state store with any record overrides we detected, such as an
* expired state.
*
* @param record record to be updated.
* @throws IOException If the values cannot be updated.
*/
public void overrideExpiredRecord(R record) throws IOException {
List<R> newRecords = new ArrayList<>();
newRecords.add(record);
long time = getDriver().getTime();
QueryResult<R> query = new QueryResult<>(newRecords, time);
overrideExpiredRecords(query);
}
/**
* Get all the cached records.
*
* @return Copy of the cached records.
* @throws StateStoreUnavailableException If the State store is not available.
*/
public List<R> getCachedRecords() throws StateStoreUnavailableException {
checkCacheAvailable();
List<R> ret = new LinkedList<R>();
this.readLock.lock();
try {
ret.addAll(this.records);
} finally {
this.readLock.unlock();
}
return ret;
}
/**
* Get all the cached records and the time stamp of the cache.
*
* @return Copy of the cached records and the time stamp.
* @throws StateStoreUnavailableException If the State store is not available.
*/
protected QueryResult<R> getCachedRecordsAndTimeStamp()
throws StateStoreUnavailableException {
checkCacheAvailable();
this.readLock.lock();
try {
return new QueryResult<R>(this.records, this.timestamp);
} finally {
this.readLock.unlock();
}
}
}
|
CachedRecordStore
|
java
|
quarkusio__quarkus
|
extensions/reactive-datasource/runtime/src/main/java/io/quarkus/reactive/datasource/runtime/DataSourceReactiveRuntimeConfig.java
|
{
"start": 770,
"end": 5334
}
|
interface ____ {
/**
* Whether prepared statements should be cached on the client side.
*/
@ConfigDocDefault("true for PostgreSQL/MySQL/MariaDB/Db2, false otherwise")
Optional<Boolean> cachePreparedStatements();
/**
* The datasource URLs.
* <p>
* If multiple values are set, this datasource will create a pool with a list of servers instead of a single server.
* The pool uses round-robin load balancing for server selection during connection establishment.
* Note that certain drivers might not accommodate multiple values in this context.
*/
Optional<List<@WithConverter(TrimmedStringConverter.class) String>> url();
/**
* The datasource pool maximum size.
*/
@WithDefault("20")
int maxSize();
/**
* When a new connection object is created, the pool assigns it an event loop.
* <p>
* When {@code #event-loop-size} is set to a strictly positive value, the pool assigns as many event loops as specified, in
* a round-robin fashion.
* By default, the number of event loops configured or calculated by Quarkus is used.
* If {@code #event-loop-size} is set to zero or a negative value, the pool assigns the current event loop to the new
* connection.
*/
OptionalInt eventLoopSize();
/**
* Whether all server certificates should be trusted.
*/
@WithDefault("false")
boolean trustAll();
/**
* Trust configuration in the PEM format.
* <p>
* When enabled, {@code #trust-certificate-jks} and {@code #trust-certificate-pfx} must be disabled.
*/
PemTrustCertConfiguration trustCertificatePem();
/**
* Trust configuration in the JKS format.
* <p>
* When enabled, {@code #trust-certificate-pem} and {@code #trust-certificate-pfx} must be disabled.
*/
JksConfiguration trustCertificateJks();
/**
* Trust configuration in the PFX format.
* <p>
* When enabled, {@code #trust-certificate-jks} and {@code #trust-certificate-pem} must be disabled.
*/
PfxConfiguration trustCertificatePfx();
/**
* Key/cert configuration in the PEM format.
* <p>
* When enabled, {@code key-certificate-jks} and {@code #key-certificate-pfx} must be disabled.
*/
PemKeyCertConfiguration keyCertificatePem();
/**
* Key/cert configuration in the JKS format.
* <p>
* When enabled, {@code #key-certificate-pem} and {@code #key-certificate-pfx} must be disabled.
*/
JksConfiguration keyCertificateJks();
/**
* Key/cert configuration in the PFX format.
* <p>
* When enabled, {@code key-certificate-jks} and {@code #key-certificate-pem} must be disabled.
*/
PfxConfiguration keyCertificatePfx();
/**
* The number of reconnection attempts when a pooled connection cannot be established on first try.
*/
@WithDefault("0")
int reconnectAttempts();
/**
* The interval between reconnection attempts when a pooled connection cannot be established on first try.
*/
@WithDefault("PT1S")
Duration reconnectInterval();
/**
* The hostname verification algorithm to use in case the server's identity should be checked.
* Should be {@code HTTPS}, {@code LDAPS} or {@code NONE}.
* {@code NONE} is the default value and disables the verification.
*/
@WithDefault("NONE")
String hostnameVerificationAlgorithm();
/**
* The maximum time a connection remains unused in the pool before it is closed.
*/
@ConfigDocDefault("no timeout")
Optional<Duration> idleTimeout();
/**
* The maximum time a connection remains in the pool, after which it will be closed
* upon return and replaced as necessary.
*/
@ConfigDocDefault("no timeout")
Optional<Duration> maxLifetime();
/**
* Set to true to share the pool among datasources.
* There can be multiple shared pools distinguished by <name>name</name>, when no specific name is set,
* the <code>__vertx.DEFAULT</code> name is used.
*/
@WithDefault("false")
boolean shared();
/**
* Set the pool name, used when the pool is shared among datasources, otherwise ignored.
*/
Optional<String> name();
/**
* Other unspecified properties to be passed through the Reactive SQL Client directly to the database when new connections
* are initiated.
*/
@ConfigDocMapKey("property-key")
Map<String, String> additionalProperties();
}
|
DataSourceReactiveRuntimeConfig
|
java
|
elastic__elasticsearch
|
test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java
|
{
"start": 1403,
"end": 48051
}
|
class ____ extends AbstractClientYamlTestFragmentParserTestCase {
public void testParseTestSetupWithSkip() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
---
setup:
- skip:
known_issues:
- cluster_feature: "feature_a"
fixed_by: "feature_a_fix"
reason: "Bug introduced with feature a, fixed with feature a fix"
---
date:
- skip:
cluster_features: "tsdb_indexing"
reason: tsdb indexing changed in 8.2.0
- do:
indices.get_mapping:
index: test_index
- match: {test_index.test_type.properties.text.type: string}
- match: {test_index.test_type.properties.text.analyzer: whitespace}
""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getSetupSection(), notNullValue());
assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getSetupSection().getPrerequisiteSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getSetupSection().getExecutableSections().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().size(), equalTo(1));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("date"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(3));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class));
DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0);
assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_mapping"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(1));
assertThat(doSection.getApiCallSection().getParams().get("index"), equalTo("test_index"));
}
public void testParseTestSetupTeardownAndSections() throws Exception {
final boolean includeSetup = randomBoolean();
final boolean includeTeardown = randomBoolean();
StringBuilder testSpecBuilder = new StringBuilder();
if (includeSetup) {
testSpecBuilder.append("""
---
setup:
- do:
indices.create:
index: test_index
""");
}
if (includeTeardown) {
testSpecBuilder.append("""
---
teardown:
- do:
indices.delete:
index: test_index
""");
}
parser = createParser(YamlXContent.yamlXContent, testSpecBuilder + """
---
"Get index mapping":
- do:
indices.get_mapping:
index: test_index
- match: {test_index.test_type.properties.text.type: string}
- match: {test_index.test_type.properties.text.analyzer: whitespace}
---
"Get type mapping - pre 6.0":
- skip:
cluster_features: "feature_in_6.0"
reason: "for newer versions the index name is always returned"
- do:
indices.get_mapping:
index: test_index
type: test_type
- match: {test_type.properties.text.type: string}
- match: {test_type.properties.text.analyzer: whitespace}
""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getSetupSection(), notNullValue());
if (includeSetup) {
assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getSetupSection().getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getSetupSection().getExecutableSections().size(), equalTo(1));
final ExecutableSection maybeDoSection = restTestSuite.getSetupSection().getExecutableSections().get(0);
assertThat(maybeDoSection, instanceOf(DoSection.class));
final DoSection doSection = (DoSection) maybeDoSection;
assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.create"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(1));
assertThat(doSection.getApiCallSection().getParams().get("index"), equalTo("test_index"));
} else {
assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true));
}
assertThat(restTestSuite.getTeardownSection(), notNullValue());
if (includeTeardown) {
assertThat(restTestSuite.getTeardownSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getTeardownSection().getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTeardownSection().getDoSections().size(), equalTo(1));
assertThat(
((DoSection) restTestSuite.getTeardownSection().getDoSections().get(0)).getApiCallSection().getApi(),
equalTo("indices.delete")
);
assertThat(
((DoSection) restTestSuite.getTeardownSection().getDoSections().get(0)).getApiCallSection().getParams().size(),
equalTo(1)
);
assertThat(
((DoSection) restTestSuite.getTeardownSection().getDoSections().get(0)).getApiCallSection().getParams().get("index"),
equalTo("test_index")
);
} else {
assertThat(restTestSuite.getTeardownSection().isEmpty(), equalTo(true));
}
assertThat(restTestSuite.getTestSections().size(), equalTo(2));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Get index mapping"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(3));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class));
DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0);
assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_mapping"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(1));
assertThat(doSection.getApiCallSection().getParams().get("index"), equalTo("test_index"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(1), instanceOf(MatchAssertion.class));
MatchAssertion matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(1);
assertThat(matchAssertion.getField(), equalTo("test_index.test_type.properties.text.type"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("string"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(2), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(2);
assertThat(matchAssertion.getField(), equalTo("test_index.test_type.properties.text.analyzer"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("whitespace"));
assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 6.0"));
assertThat(restTestSuite.getTestSections().get(1).getPrerequisiteSection().isEmpty(), equalTo(false));
assertThat(
restTestSuite.getTestSections().get(1).getPrerequisiteSection().skipReason,
equalTo("for newer versions the index name is always returned")
);
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class));
doSection = (DoSection) restTestSuite.getTestSections().get(1).getExecutableSections().get(0);
assertThat(doSection.getApiCallSection().getApi(), equalTo("indices.get_mapping"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(2));
assertThat(doSection.getApiCallSection().getParams().get("index"), equalTo("test_index"));
assertThat(doSection.getApiCallSection().getParams().get("type"), equalTo("test_type"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(1), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(1).getExecutableSections().get(1);
assertThat(matchAssertion.getField(), equalTo("test_type.properties.text.type"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("string"));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(2), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(1).getExecutableSections().get(2);
assertThat(matchAssertion.getField(), equalTo("test_type.properties.text.analyzer"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("whitespace"));
}
public void testParseTestSingleTestSection() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
---
"Index with ID":
- do:
index:
index: test-weird-index-中文
type: weird.type
id: 1
body: { foo: bar }
- is_true: ok
- match: { _index: test-weird-index-中文 }
- match: { _type: weird.type }
- match: { _id: "1"}
- match: { _version: 1}
- do:
get:
index: test-weird-index-中文
type: weird.type
id: 1
- match: { _index: test-weird-index-中文 }
- match: { _type: weird.type }
- match: { _id: "1"}
- match: { _version: 1}
- match: { _source: { foo: bar }}""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().size(), equalTo(1));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Index with ID"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(12));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class));
DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0);
assertThat(doSection.getCatch(), nullValue());
assertThat(doSection.getApiCallSection().getApi(), equalTo("index"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(3));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(1), instanceOf(IsTrueAssertion.class));
IsTrueAssertion trueAssertion = (IsTrueAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(1);
assertThat(trueAssertion.getField(), equalTo("ok"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(2), instanceOf(MatchAssertion.class));
MatchAssertion matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(2);
assertThat(matchAssertion.getField(), equalTo("_index"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("test-weird-index-中文"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(3), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(3);
assertThat(matchAssertion.getField(), equalTo("_type"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("weird.type"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(4), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(4);
assertThat(matchAssertion.getField(), equalTo("_id"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("1"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(5), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(5);
assertThat(matchAssertion.getField(), equalTo("_version"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("1"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(6), instanceOf(DoSection.class));
doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(6);
assertThat(doSection.getCatch(), nullValue());
assertThat(doSection.getApiCallSection().getApi(), equalTo("get"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(3));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(false));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(7), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(7);
assertThat(matchAssertion.getField(), equalTo("_index"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("test-weird-index-中文"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(8), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(8);
assertThat(matchAssertion.getField(), equalTo("_type"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("weird.type"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(9), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(9);
assertThat(matchAssertion.getField(), equalTo("_id"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("1"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(10), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(10);
assertThat(matchAssertion.getField(), equalTo("_version"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("1"));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(11), instanceOf(MatchAssertion.class));
matchAssertion = (MatchAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(11);
assertThat(matchAssertion.getField(), equalTo("_source"));
assertThat(matchAssertion.getExpectedValue(), instanceOf(Map.class));
assertThat(((Map) matchAssertion.getExpectedValue()).get("foo").toString(), equalTo("bar"));
}
public void testParseTestMultipleTestSections() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
---
"Missing document (partial doc)":
- do:
catch: missing
update:
index: test_1
type: test
id: 1
body: { doc: { foo: bar } }
- do:
update:
index: test_1
type: test
id: 1
body: { doc: { foo: bar } }
ignore: 404
---
"Missing document (script)":
- do:
catch: missing
update:
index: test_1
type: test
id: 1
body:
script: "ctx._source.foo = bar"
params: { bar: 'xxx' }
- do:
update:
index: test_1
type: test
id: 1
ignore: 404
body:
script: "ctx._source.foo = bar"
params: { bar: 'xxx' }
""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().size(), equalTo(2));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Missing document (partial doc)"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(2));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class));
DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0);
assertThat(doSection.getCatch(), equalTo("missing"));
assertThat(doSection.getApiCallSection().getApi(), equalTo("update"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(3));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(1), instanceOf(DoSection.class));
doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(1);
assertThat(doSection.getCatch(), nullValue());
assertThat(doSection.getApiCallSection().getApi(), equalTo("update"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(4));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Missing document (script)"));
assertThat(restTestSuite.getTestSections().get(1).getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(2));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(1), instanceOf(DoSection.class));
doSection = (DoSection) restTestSuite.getTestSections().get(1).getExecutableSections().get(0);
assertThat(doSection.getCatch(), equalTo("missing"));
assertThat(doSection.getApiCallSection().getApi(), equalTo("update"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(3));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(1), instanceOf(DoSection.class));
doSection = (DoSection) restTestSuite.getTestSections().get(1).getExecutableSections().get(1);
assertThat(doSection.getCatch(), nullValue());
assertThat(doSection.getApiCallSection().getApi(), equalTo("update"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(4));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(true));
}
/**
 * Two test sections with the same name in one YAML suite are a parse-time
 * error: parsing must fail with a ParsingException naming the duplicate.
 */
public void testParseTestDuplicateTestSections() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
---
"Missing document (script)":
- do:
catch: missing
update:
index: test_1
type: test
id: 1
body: { doc: { foo: bar } }
---
"Missing document (script)":
- do:
catch: missing
update:
index: test_1
type: test
id: 1
body:
script: "ctx._source.foo = bar"
params: { bar: 'xxx' }
""");
// The duplicate section name must be rejected, not silently merged.
Exception e = expectThrows(
ParsingException.class,
() -> ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser)
);
assertThat(e.getMessage(), containsString("duplicate test section"));
}
/**
 * A skip-by-OS block must parse into a non-empty prerequisite section that
 * exposes both the skip reason and the declared "skip_os" runner feature.
 */
public void testParseSkipOs() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
"Broken on some os":
- skip:
features: skip_os
os: ["windows95", "debian-5"]
reason: "not supported"
- do:
indices.get_mapping:
index: test_index
type: test_type
- match: {test_type.properties.text.type: string}
- match: {test_type.properties.text.analyzer: whitespace}
""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
// Parsed from a reader, not a file, so no backing path is recorded.
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getTestSections().size(), equalTo(1));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Broken on some os"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().skipReason, containsString("not supported"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().hasYamlRunnerFeature("skip_os"), equalTo(true));
}
/**
 * A skip block with only {@code awaits_fix} (no reason/features) mutes the
 * test: the prerequisite section must still be reported as non-empty.
 */
public void testMuteUsingAwaitsFix() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
"Mute":
- skip:
awaits_fix: bugurl
- do:
indices.get_mapping:
index: test_index
type: test_type
- match: {test_type.properties.text.type: string}
- match: {test_type.properties.text.analyzer: whitespace}
""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getTestSections().size(), equalTo(1));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Mute"));
// awaits_fix alone is enough to make the prerequisite section non-empty.
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(false));
}
/**
 * Combined skip (known_issues + unsupported cluster features) and requires
 * (required cluster feature) blocks must both parse, each keeping its own
 * reason string on the prerequisite section.
 */
public void testParseSkipAndRequireClusterFeatures() throws Exception {
parser = createParser(YamlXContent.yamlXContent, """
"Broken on some os":
- skip:
known_issues:
- cluster_feature: buggy_feature
fixed_by: buggy_feature_fix
cluster_features: [unsupported-feature1, unsupported-feature2]
reason: "unsupported-features are not supported"
- requires:
cluster_features: required-feature1
reason: "required-feature1 is required"
- do:
indices.get_mapping:
index: test_index
type: test_type
- match: {test_type.properties.text.type: string}
- match: {test_type.properties.text.analyzer: whitespace}
""");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(getTestClass().getName(), getTestName(), Optional.empty(), parser);
assertThat(restTestSuite, notNullValue());
assertThat(restTestSuite.getName(), equalTo(getTestName()));
assertThat(restTestSuite.getFile().isPresent(), equalTo(false));
assertThat(restTestSuite.getTestSections().size(), equalTo(1));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Broken on some os"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(false));
// skip and requires each carry an independent reason.
assertThat(
restTestSuite.getTestSections().get(0).getPrerequisiteSection().skipReason,
equalTo("unsupported-features are not supported")
);
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().requireReason, equalTo("required-feature1 is required"));
}
/**
 * Parsing from an actual file (rather than a reader) must record the file
 * path, derive the suite name from the file name (extension stripped), and
 * decompose the single section into a do-section plus an is_true assertion.
 */
public void testParseFileWithSingleTestSection() throws Exception {
final Path filePath = createTempFile("tyf", ".yml");
Files.writeString(filePath, """
---
"Index with ID":
- do:
index:
index: test-weird-index-中文
type: weird.type
id: 1
body: { foo: bar }
- is_true: ok""" + "\n");
ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(ExecutableSection.XCONTENT_REGISTRY, "api", filePath);
assertThat(restTestSuite, notNullValue());
// Suite name is the file name with its ".yml" extension removed.
assertThat(
restTestSuite.getName(),
equalTo(filePath.getFileName().toString().substring(0, filePath.getFileName().toString().lastIndexOf('.')))
);
assertThat(restTestSuite.getFile().isPresent(), equalTo(true));
assertThat(restTestSuite.getFile().get(), equalTo(filePath));
assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().size(), equalTo(1));
assertThat(restTestSuite.getTestSections().get(0).getName(), equalTo("Index with ID"));
assertThat(restTestSuite.getTestSections().get(0).getPrerequisiteSection().isEmpty(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().size(), equalTo(2));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(0), instanceOf(DoSection.class));
DoSection doSection = (DoSection) restTestSuite.getTestSections().get(0).getExecutableSections().get(0);
assertThat(doSection.getCatch(), nullValue());
assertThat(doSection.getApiCallSection().getApi(), equalTo("index"));
assertThat(doSection.getApiCallSection().getParams().size(), equalTo(3));
assertThat(doSection.getApiCallSection().hasBody(), equalTo(true));
assertThat(restTestSuite.getTestSections().get(0).getExecutableSections().get(1), instanceOf(IsTrueAssertion.class));
IsTrueAssertion trueAssertion = (IsTrueAssertion) restTestSuite.getTestSections().get(0).getExecutableSections().get(1);
assertThat(trueAssertion.getField(), equalTo("ok"));
}
/**
 * A plain [do] section with no special features needs no corresponding
 * test_runner_features declaration: suite validation must pass silently.
 */
public void testAddingDoWithoutSkips() {
    final int line = between(1, 10000);
    final DoSection plainDo = new DoSection(new XContentLocation(line, 0));
    plainDo.setApiCallSection(new ApiCallSection("test"));
    final ClientYamlTestSection testSection = new ClientYamlTestSection(
        new XContentLocation(0, 0),
        "test",
        PrerequisiteSection.EMPTY,
        Collections.singletonList(plainDo)
    );
    final ClientYamlTestSuite suite = new ClientYamlTestSuite(
        "api",
        "name",
        Optional.empty(),
        SetupSection.EMPTY,
        TeardownSection.EMPTY,
        Collections.singletonList(testSection)
    );
    // Must not throw: nothing here requires a runner feature.
    suite.validate();
}
/**
 * A [do] carrying a [warnings] expectation must fail validate() unless the
 * suite declares the "warnings" test-runner feature; the error must name the
 * offending line.
 */
public void testAddingDoWithWarningWithoutSkipWarnings() {
int lineNumber = between(1, 10000);
DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0));
doSection.setExpectedWarningHeaders(singletonList("foo"));
doSection.setApiCallSection(new ApiCallSection("test"));
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [do] with a [warnings] section without a corresponding ["requires": "test_runner_features": "warnings"] \
so runners that do not support the [warnings] section can skip the test at line [%d]\
""", lineNumber)));
}
/**
 * Same as the [warnings] case, but for regex-based warning expectations:
 * requires the "warnings_regex" runner feature or validation fails.
 */
public void testAddingDoWithWarningRegexWithoutSkipWarnings() {
int lineNumber = between(1, 10000);
DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0));
doSection.setExpectedWarningHeadersRegex(singletonList(Pattern.compile("foo")));
doSection.setApiCallSection(new ApiCallSection("test"));
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [do] with a [warnings_regex] section without a corresponding \
["requires": "test_runner_features": "warnings_regex"] \
so runners that do not support the [warnings_regex] section can skip the test at line [%d]\
""", lineNumber)));
}
/**
 * [allowed_warnings] on a [do] requires the "allowed_warnings" runner
 * feature; without it, validate() must fail and report the line number.
 */
public void testAddingDoWithAllowedWarningWithoutSkipAllowedWarnings() {
int lineNumber = between(1, 10000);
DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0));
doSection.setAllowedWarningHeaders(singletonList("foo"));
doSection.setApiCallSection(new ApiCallSection("test"));
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [do] with a [allowed_warnings] section without a corresponding ["requires": "test_runner_features": \
"allowed_warnings"] so runners that do not support the [allowed_warnings] section can skip the test at \
line [%d]\
""", lineNumber)));
}
/**
 * Regex variant of allowed warnings: requires the "allowed_warnings_regex"
 * runner feature or validate() must fail with the offending line number.
 */
public void testAddingDoWithAllowedWarningRegexWithoutSkipAllowedWarnings() {
int lineNumber = between(1, 10000);
DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0));
doSection.setAllowedWarningHeadersRegex(singletonList(Pattern.compile("foo")));
doSection.setApiCallSection(new ApiCallSection("test"));
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [do] with a [allowed_warnings_regex] section without a corresponding ["requires": "test_runner_features": \
"allowed_warnings_regex"] so runners that do not support the [allowed_warnings_regex] section can skip the test \
at line [%d]\
""", lineNumber)));
}
/**
 * Custom request headers on a [do] require the "headers" runner feature;
 * without it, validate() must fail and report the line number.
 */
public void testAddingDoWithHeaderWithoutSkipHeaders() {
int lineNumber = between(1, 10000);
DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0));
ApiCallSection apiCallSection = new ApiCallSection("test");
apiCallSection.addHeaders(Collections.singletonMap("header", "value"));
doSection.setApiCallSection(apiCallSection);
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [do] with a [headers] section without a corresponding ["requires": "test_runner_features": "headers"] \
so runners that do not support the [headers] section can skip the test at line [%d]\
""", lineNumber)));
}
/**
 * A node selector on a [do] requires the "node_selector" runner feature;
 * without it, validate() must fail and report the line number.
 */
public void testAddingDoWithNodeSelectorWithoutSkipNodeSelector() {
int lineNumber = between(1, 10000);
DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0));
ApiCallSection apiCall = new ApiCallSection("test");
apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS);
doSection.setApiCallSection(apiCall);
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, doSection);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [do] with a [node_selector] section without a corresponding \
["requires": "test_runner_features": "node_selector"] \
so runners that do not support the [node_selector] section can skip the test at line [%d]\
""", lineNumber)));
}
/**
 * A [contains] assertion requires the "contains" runner feature; without it,
 * validate() must fail and report the line number.
 */
public void testAddingContainsWithoutSkipContains() {
int lineNumber = between(1, 10000);
ContainsAssertion containsAssertion = new ContainsAssertion(
new XContentLocation(lineNumber, 0),
randomAlphaOfLength(randomIntBetween(3, 30)),
randomDouble()
);
ClientYamlTestSuite testSuite = createTestSuite(PrerequisiteSection.EMPTY, containsAssertion);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
assertThat(e.getMessage(), containsString(Strings.format("""
api/name:
attempted to add a [contains] assertion without a corresponding ["requires": "test_runner_features": "contains"] \
so runners that do not support the [contains] assertion can skip the test at line [%d]\
""", lineNumber)));
}
/**
 * When several sections each violate a different runner-feature requirement,
 * validate() must aggregate every violation (in section/statement order)
 * into a single exception message.
 */
public void testMultipleValidationErrors() {
int firstLineNumber = between(1, 10000);
List<ClientYamlTestSection> sections = new ArrayList<>();
// section1: a [contains] assertion with no "contains" feature declared.
{
ContainsAssertion containsAssertion = new ContainsAssertion(
new XContentLocation(firstLineNumber, 0),
randomAlphaOfLength(randomIntBetween(3, 30)),
randomDouble()
);
sections.add(
new ClientYamlTestSection(
new XContentLocation(0, 0),
"section1",
PrerequisiteSection.EMPTY,
Collections.singletonList(containsAssertion)
)
);
}
int secondLineNumber = between(1, 10000);
int thirdLineNumber = between(1, 10000);
List<ExecutableSection> doSections = new ArrayList<>();
// section2, first violation: [warnings] with no "warnings" feature.
{
DoSection doSection = new DoSection(new XContentLocation(secondLineNumber, 0));
doSection.setExpectedWarningHeaders(singletonList("foo"));
doSection.setApiCallSection(new ApiCallSection("test"));
doSections.add(doSection);
}
// section2, second violation: [node_selector] with no "node_selector" feature.
{
DoSection doSection = new DoSection(new XContentLocation(thirdLineNumber, 0));
ApiCallSection apiCall = new ApiCallSection("test");
apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS);
doSection.setApiCallSection(apiCall);
doSections.add(doSection);
}
sections.add(new ClientYamlTestSection(new XContentLocation(0, 0), "section2", PrerequisiteSection.EMPTY, doSections));
ClientYamlTestSuite testSuite = new ClientYamlTestSuite(
"api",
"name",
Optional.empty(),
SetupSection.EMPTY,
TeardownSection.EMPTY,
sections
);
Exception e = expectThrows(IllegalArgumentException.class, testSuite::validate);
// All three violations must appear, comma-separated, in declaration order.
assertEquals(Strings.format("""
api/name:
attempted to add a [contains] assertion without a corresponding \
["requires": "test_runner_features": "contains"] \
so runners that do not support the [contains] assertion can skip the test at line [%d],
attempted to add a [do] with a [warnings] section without a corresponding \
["requires": "test_runner_features": "warnings"] \
so runners that do not support the [warnings] section can skip the test at line [%d],
attempted to add a [do] with a [node_selector] section without a corresponding \
["requires": "test_runner_features": "node_selector"] \
so runners that do not support the [node_selector] section can skip the test at line [%d]\
""", firstLineNumber, secondLineNumber, thirdLineNumber), e.getMessage());
}
/** Builds a prerequisite section declaring exactly one yaml test-runner feature. */
private static PrerequisiteSection createPrerequisiteSection(String yamlTestRunnerFeature) {
    final List<String> runnerFeatures = singletonList(yamlTestRunnerFeature);
    return new PrerequisiteSection(emptyList(), null, emptyList(), null, runnerFeatures);
}
/** With the "warnings" runner feature declared, a [do] expecting warnings validates cleanly. */
public void testAddingDoWithWarningWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("warnings");
    final DoSection warningDo = new DoSection(new XContentLocation(between(1, 10000), 0));
    warningDo.setApiCallSection(new ApiCallSection("test"));
    warningDo.setExpectedWarningHeaders(singletonList("foo"));
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, warningDo).validate();
}
/** With "warnings_regex" declared, a [do] expecting regex warnings validates cleanly. */
public void testAddingDoWithWarningRegexWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("warnings_regex");
    final DoSection regexDo = new DoSection(new XContentLocation(between(1, 10000), 0));
    regexDo.setApiCallSection(new ApiCallSection("test"));
    regexDo.setExpectedWarningHeadersRegex(singletonList(Pattern.compile("foo")));
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, regexDo).validate();
}
/** With "node_selector" declared, a [do] using a node selector validates cleanly. */
public void testAddingDoWithNodeSelectorWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("node_selector");
    final ApiCallSection apiCall = new ApiCallSection("test");
    apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS);
    final DoSection selectorDo = new DoSection(new XContentLocation(between(1, 10000), 0));
    selectorDo.setApiCallSection(apiCall);
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, selectorDo).validate();
}
/** With "headers" declared, a [do] sending custom headers validates cleanly. */
public void testAddingDoWithHeadersWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("headers");
    final ApiCallSection apiCall = new ApiCallSection("test");
    apiCall.addHeaders(singletonMap("foo", "bar"));
    final DoSection headerDo = new DoSection(new XContentLocation(between(1, 10000), 0));
    headerDo.setApiCallSection(apiCall);
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, headerDo).validate();
}
/** [contains] assertions validate cleanly when the "contains" runner feature is declared. */
public void testAddingContainsWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("contains");
    final ContainsAssertion assertion = new ContainsAssertion(
        new XContentLocation(between(1, 10000), 0),
        randomAlphaOfLength(randomIntBetween(3, 30)),
        randomDouble()
    );
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, assertion).validate();
}
/** [close_to] assertions validate cleanly when the "close_to" runner feature is declared. */
public void testAddingCloseToWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("close_to");
    final CloseToAssertion assertion = new CloseToAssertion(
        new XContentLocation(between(1, 10000), 0),
        randomAlphaOfLength(randomIntBetween(3, 30)),
        randomDouble(),
        randomDouble()
    );
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, assertion).validate();
}
/** [is_after] assertions validate cleanly when the "is_after" runner feature is declared. */
public void testAddingIsAfterWithSkip() {
    final PrerequisiteSection prerequisites = createPrerequisiteSection("is_after");
    final IsAfterAssertion assertion = new IsAfterAssertion(
        new XContentLocation(between(1, 10000), 0),
        randomAlphaOfLength(randomIntBetween(3, 30)),
        randomInstantBetween(Instant.ofEpochSecond(0L), Instant.ofEpochSecond(3000000000L))
    );
    // Must not throw: the feature requirement is satisfied.
    createTestSuite(prerequisites, assertion).validate();
}
/**
 * Builds a one-section suite that places {@code prerequisiteSection} in a
 * randomly chosen location (setup, teardown, or the test section itself) so
 * callers exercise validate() against every placement the format allows.
 */
private static ClientYamlTestSuite createTestSuite(PrerequisiteSection prerequisiteSection, ExecutableSection executableSection) {
final SetupSection setupSection;
final TeardownSection teardownSection;
final ClientYamlTestSection clientYamlTestSection;
switch (randomIntBetween(0, 4)) {
// Case 0: prerequisites on an otherwise empty setup section.
case 0 -> {
setupSection = new SetupSection(prerequisiteSection, Collections.emptyList());
teardownSection = TeardownSection.EMPTY;
clientYamlTestSection = new ClientYamlTestSection(
new XContentLocation(0, 0),
"test",
PrerequisiteSection.EMPTY,
Collections.singletonList(executableSection)
);
}
// Case 1: prerequisites on an otherwise empty teardown section.
case 1 -> {
setupSection = SetupSection.EMPTY;
teardownSection = new TeardownSection(prerequisiteSection, Collections.emptyList());
clientYamlTestSection = new ClientYamlTestSection(
new XContentLocation(0, 0),
"test",
PrerequisiteSection.EMPTY,
Collections.singletonList(executableSection)
);
}
// Case 2: prerequisites directly on the test section.
case 2 -> {
setupSection = SetupSection.EMPTY;
teardownSection = TeardownSection.EMPTY;
clientYamlTestSection = new ClientYamlTestSection(
new XContentLocation(0, 0),
"test",
prerequisiteSection,
Collections.singletonList(executableSection)
);
}
// Case 3: the executable lives in setup (and maybe also in the test section).
case 3 -> {
setupSection = new SetupSection(prerequisiteSection, Collections.singletonList(executableSection));
teardownSection = TeardownSection.EMPTY;
clientYamlTestSection = new ClientYamlTestSection(
new XContentLocation(0, 0),
"test",
PrerequisiteSection.EMPTY,
randomBoolean() ? Collections.emptyList() : Collections.singletonList(executableSection)
);
}
// Case 4: the executable lives in teardown (and maybe also in the test section).
case 4 -> {
setupSection = SetupSection.EMPTY;
teardownSection = new TeardownSection(prerequisiteSection, Collections.singletonList(executableSection));
clientYamlTestSection = new ClientYamlTestSection(
new XContentLocation(0, 0),
"test",
PrerequisiteSection.EMPTY,
randomBoolean() ? Collections.emptyList() : Collections.singletonList(executableSection)
);
}
default -> throw new UnsupportedOperationException();
}
return new ClientYamlTestSuite(
"api",
"name",
Optional.empty(),
setupSection,
teardownSection,
Collections.singletonList(clientYamlTestSection)
);
}
}
|
ClientYamlTestSuiteTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/tool/schema/internal/CheckForExistingForeignKeyTest.java
|
{
"start": 3546,
"end": 15999
}
|
/**
 * Minimal stub of {@code IdentifierHelper} for these tests: every operation
 * returns null or false except {@code toMetaDataObjectName}, which echoes the
 * identifier's text — the only behaviour the key-matching code under test uses.
 */
class ____ implements IdentifierHelper {
@Override
public Identifier normalizeQuoting(Identifier identifier) {
return null;
}
@Override
public Identifier toIdentifier(String text) {
return null;
}
@Override
public Identifier toIdentifier(String text, boolean quoted) {
return null;
}
@Override
public Identifier applyGlobalQuoting(String text) {
return null;
}
@Override
public boolean isReservedWord(String word) {
return false;
}
@Override
public String toMetaDataCatalogName(Identifier catalogIdentifier) {
return null;
}
@Override
public String toMetaDataSchemaName(Identifier schemaIdentifier) {
return null;
}
@Override
public String toMetaDataObjectName(Identifier identifier) {
// The one real behaviour: resolve metadata object names to the raw text.
return identifier.getText();
}
}
/**
 * A foreign key without a name can never match an existing key, so nameless
 * keys are always recreated. Keys always carry a name in practice, so this
 * behaviour is harmless.
 *
 * @throws NoSuchMethodException - error
 * @throws SecurityException - error
 * @throws IllegalAccessException - error
 * @throws IllegalArgumentException - error
 * @throws InvocationTargetException - error
 */
@Test
public void testForeignKeyWithoutName()
throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    // Reflectively expose the private migration check under test.
    Method checkMethod = AbstractSchemaMigrator.class.getDeclaredMethod( "checkForExistingForeignKey", ForeignKey.class, TableInformation.class );
    checkMethod.setAccessible( true );
    ForeignKey namelessKey = new ForeignKey();
    TableInformation existingTable = new TableInformationImpl( null, null, null, false, null );
    boolean matched = (boolean) checkMethod.invoke( new SchemaMigrator(), namelessKey, existingTable );
    assertFalse( matched, "Key should not be found" );
}
/**
 * A key lookup against missing (null) table information must report
 * "not found" rather than failing.
 *
 * @throws NoSuchMethodException - error
 * @throws SecurityException - error
 * @throws IllegalAccessException - error
 * @throws IllegalArgumentException - error
 * @throws InvocationTargetException - error
 */
@Test
public void testMissingTableInformation()
throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    // Reflectively expose the private migration check under test.
    Method checkMethod = AbstractSchemaMigrator.class.getDeclaredMethod( "checkForExistingForeignKey", ForeignKey.class, TableInformation.class );
    checkMethod.setAccessible( true );
    ForeignKey namedKey = new ForeignKey();
    namedKey.setName( "objectId2id" );
    // No table information at all -> the key can never be located.
    boolean matched = (boolean) checkMethod.invoke( new SchemaMigrator(), namedKey, null );
    assertFalse( matched, "Key should not be found" );
}
/**
 * An existing database foreign key carrying the same name as the mapped key
 * must be detected as a match.
 *
 * @throws SecurityException - error
 * @throws NoSuchMethodException - error
 * @throws InvocationTargetException - error
 * @throws IllegalArgumentException - error
 * @throws IllegalAccessException - error
 * @throws NoSuchFieldException - error
 */
@Test
public void testKeyWithSameNameExists()
throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException,
NoSuchFieldException {
    // Reflectively expose the private migration check under test.
    Method checkMethod = AbstractSchemaMigrator.class.getDeclaredMethod( "checkForExistingForeignKey", ForeignKey.class, TableInformation.class );
    checkMethod.setAccessible( true );
    ForeignKey mappedKey = new ForeignKey();
    mappedKey.setName( "objectId2id" );
    mappedKey.addColumn( new Column( "id" ) );
    mappedKey.setReferencedTable( new Table( "orm", "table2" ) );
    // Simulate a database that already holds a key with the identical name.
    List<ForeignKeyInformation> existingKeys = new ArrayList<>();
    existingKeys.add( new ForeignKeyInformationImpl( new Identifier( "objectId2id", false ), new ArrayList<>() ) );
    InformationExtractor extractor = Mockito.mock( InformationExtractor.class );
    Mockito.when( extractor.getForeignKeys( Mockito.any() ) ).thenReturn( existingKeys );
    Name schema = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
    QualifiedTableName qualifiedName = new QualifiedTableName( schema, new Identifier( "-", false ) );
    TableInformation tableInformation = new TableInformationImpl( extractor, new IdentifierHelperImpl(), qualifiedName, false, null );
    boolean matched = (boolean) checkMethod.invoke( new SchemaMigrator(), mappedKey, tableInformation );
    assertTrue( matched, "Key should be found" );
}
/**
 * A mapped foreign key whose name matches no existing database key (and whose
 * mapping differs) must be reported as not found.
 *
 * @throws SecurityException - error
 * @throws NoSuchMethodException - error
 * @throws InvocationTargetException - error
 * @throws IllegalArgumentException - error
 * @throws IllegalAccessException - error
 * @throws NoSuchFieldException - error
 */
@Test
public void testKeyWithSameNameNotExists()
throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException,
NoSuchFieldException {
    // Reflectively expose the private migration check under test.
    Method checkMethod = AbstractSchemaMigrator.class.getDeclaredMethod( "checkForExistingForeignKey", ForeignKey.class, TableInformation.class );
    checkMethod.setAccessible( true );
    ForeignKey mappedKey = new ForeignKey();
    mappedKey.setName( "objectId2id_1" );
    mappedKey.addColumn( new Column( "id" ) );
    mappedKey.setReferencedTable( new Table( "orm", "table2" ) );
    // The only existing key has a different name and no column mappings.
    List<ForeignKeyInformation> existingKeys = new ArrayList<>();
    existingKeys.add( new ForeignKeyInformationImpl( new Identifier( "objectId2id_2", false ), new ArrayList<>() ) );
    InformationExtractor extractor = Mockito.mock( InformationExtractor.class );
    Mockito.when( extractor.getForeignKeys( Mockito.any() ) ).thenReturn( existingKeys );
    Name schema = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
    QualifiedTableName qualifiedName = new QualifiedTableName( schema, new Identifier( "-", false ) );
    TableInformation tableInformation = new TableInformationImpl( extractor, new IdentifierHelperImpl(), qualifiedName, false, null );
    boolean matched = (boolean) checkMethod.invoke( new SchemaMigrator(), mappedKey, tableInformation );
    assertFalse( matched, "Key should not be found" );
}
/**
 * Detects an existing foreign key by identical column mapping for a simple
 * one-to-one mapping (table1.objectId => table2.id) even when the key names
 * differ.
 *
 * @throws SecurityException - error
 * @throws NoSuchMethodException - error
 * @throws InvocationTargetException - error
 * @throws IllegalArgumentException - error
 * @throws IllegalAccessException - error
 * @throws NoSuchFieldException - error
 */
@Test
public void testCheckForExistingForeignKeyOne2One() throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException,
InvocationTargetException, NoSuchFieldException {
    // Reflectively expose the private migration check under test.
    Method checkMethod = AbstractSchemaMigrator.class.getDeclaredMethod( "checkForExistingForeignKey", ForeignKey.class, TableInformation.class );
    checkMethod.setAccessible( true );
    ForeignKey mappedKey = new ForeignKey();
    mappedKey.setName( "objectId2id_1" ); // Deliberately different from the db key name below
    mappedKey.addColumn( new Column( "id" ) );
    mappedKey.setReferencedTable( new Table( "orm", "table2" ) );
    // The db key has another name but the same column mapping.
    List<ForeignKeyInformation> existingKeys = new ArrayList<>();
    existingKeys.add( getForeignKeyInformation( "table2", "id", "object2Id_2" ) );
    InformationExtractor extractor = Mockito.mock( InformationExtractor.class );
    Mockito.when( extractor.getForeignKeys( Mockito.any() ) ).thenReturn( existingKeys );
    Name schema = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
    QualifiedTableName qualifiedName = new QualifiedTableName( schema, new Identifier( "-", false ) );
    TableInformation tableInformation = new TableInformationImpl( extractor, new IdentifierHelperImpl(), qualifiedName, false, null );
    AbstractSchemaMigrator schemaMigrator = new SchemaMigrator();
    // Single-column-key to single-column-key, existing (table1.objectId => table2.id)
    boolean matched = (boolean) checkMethod.invoke( schemaMigrator, mappedKey, tableInformation );
    assertTrue( matched, "Key should be found" );
}
/**
 * When no existing key shares the mapped key's column mapping
 * (table1.objectId => table2.id), the check must report "not found".
 *
 * @throws SecurityException - error
 * @throws NoSuchMethodException - error
 * @throws InvocationTargetException - error
 * @throws IllegalArgumentException - error
 * @throws IllegalAccessException - error
 * @throws NoSuchFieldException - error
 */
@Test
public void testCheckForNotExistingForeignKeyOne2One() throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException,
InvocationTargetException, NoSuchFieldException {
    // Reflectively expose the private migration check under test.
    Method checkMethod = AbstractSchemaMigrator.class.getDeclaredMethod( "checkForExistingForeignKey", ForeignKey.class, TableInformation.class );
    checkMethod.setAccessible( true );
    ForeignKey mappedKey = new ForeignKey();
    mappedKey.setName( "objectId2id_1" ); // Deliberately different from every db key name below
    mappedKey.addColumn( new Column( "id" ) );
    mappedKey.setReferencedTable( new Table( "orm", "table2" ) );
    // Every existing key differs in referenced table, column, or both.
    List<ForeignKeyInformation> existingKeys = new ArrayList<>();
    existingKeys.add( getForeignKeyInformation( "table2", "blah", "blahKey_001" ) );
    existingKeys.add( getForeignKeyInformation( "table3", "id", "blahKey_002" ) );
    existingKeys.add( getForeignKeyInformation( "table3", "blah", "blahKey_003" ) );
    InformationExtractor extractor = Mockito.mock( InformationExtractor.class );
    Mockito.when( extractor.getForeignKeys( Mockito.any() ) ).thenReturn( existingKeys );
    Name schema = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
    QualifiedTableName qualifiedName = new QualifiedTableName( schema, new Identifier( "-", false ) );
    TableInformation tableInformation = new TableInformationImpl( extractor, new IdentifierHelperImpl(), qualifiedName, false, null );
    AbstractSchemaMigrator schemaMigrator = new SchemaMigrator();
    boolean matched = (boolean) checkMethod.invoke( schemaMigrator, mappedKey, tableInformation );
    assertFalse( matched, "Key should not be found" );
}
/**
 * Builds a single-column {@code ForeignKeyInformation} describing a key named
 * {@code keyName} that maps {@code referencingColumnName} onto the table
 * {@code referencedTableName}.
 *
 * @param referencedTableName - String name of the table the key points at
 * @param referencingColumnName - String name of the referencing column
 * @param keyName - String name of the foreign key
 * @return ForeignKeyInformation
 */
private ForeignKeyInformation getForeignKeyInformation(String referencedTableName, String referencingColumnName, String keyName) {
    ColumnInformation referencingColumnMetadata = getColumnInformation( "-", referencingColumnName );
    ColumnInformation referencedColumnMetadata = getColumnInformation( referencedTableName, "-" );
    ColumnReferenceMapping columnReferenceMapping = new ColumnReferenceMappingImpl( referencingColumnMetadata, referencedColumnMetadata );
    // Exactly one mapping per key in these tests; presize accordingly.
    List<ColumnReferenceMapping> columnMappingList = new ArrayList<>( 1 );
    columnMappingList.add( columnReferenceMapping );
    // Return directly: the intermediate local in the previous version added nothing.
    return new ForeignKeyInformationImpl( new Identifier( keyName, false ), columnMappingList );
}
/**
 * Builds a minimal {@code ColumnInformation} for the given table/column pair;
 * all type metadata (type code, type name, size, digits, nullability) is
 * zeroed/null because the key-matching code never inspects it.
 */
private ColumnInformation getColumnInformation(String tableName, String columnName) {
    Name schema = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
    TableInformation owningTable = new TableInformationImpl( null, null,
            new QualifiedTableName( schema, new Identifier( tableName, false ) ), false, null );
    return new ColumnInformationImpl( owningTable, new Identifier( columnName, false ), 0, null, 0, 0, null );
}
}
|
IdentifierHelperImpl
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/BeanWithExceptionTest.java
|
{
"start": 1474,
"end": 3760
}
|
class ____ extends ContextTestSupport {
protected MockEndpoint validEndpoint;
protected MockEndpoint invalidEndpoint;
@Test
public void testValidMessage() throws Exception {
// A body/header/property combination the validation bean accepts: the
// exchange must reach mock:valid and never hit mock:invalid.
validEndpoint.expectedMessageCount(1);
invalidEndpoint.expectedMessageCount(0);
template.send("direct:start", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("<valid/>");
exchange.getIn().setHeader("foo", "bar");
exchange.setProperty("cheese", "old");
}
});
assertMockEndpointsSatisfied();
}
@Test
public void testInvalidMessage() throws Exception {
// Header "foo" does not match the value the bean expects, so the bean
// throws ValidationException and the onException route delivers the
// exchange to mock:invalid instead of mock:valid.
validEndpoint.expectedMessageCount(0);
invalidEndpoint.expectedMessageCount(1);
Exchange exchange = template.send("direct:start", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("<invalid/>");
exchange.getIn().setHeader("foo", "notMatchedHeaderValue");
exchange.setProperty("cheese", "old");
}
});
// The exception is preserved on the returned exchange for inspection.
assertNotNull(exchange.getException());
ValidationException exception = assertIsInstanceOf(ValidationException.class, exchange.getException());
assertEquals("Invalid header foo: notMatchedHeaderValue", exception.getMessage());
assertMockEndpointsSatisfied();
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
// Resolve the two mock endpoints once per test so expectations can be set.
validEndpoint = resolveMandatoryEndpoint("mock:valid", MockEndpoint.class);
invalidEndpoint = resolveMandatoryEndpoint("mock:invalid", MockEndpoint.class);
}
@Override
protected Registry createCamelRegistry() throws Exception {
// Register the validation bean under "myBean" so the route can look it up.
Registry answer = super.createCamelRegistry();
answer.bind("myBean", new ValidationBean());
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// Failed validations are redirected to mock:invalid; successful
// exchanges continue through the bean to mock:valid.
onException(ValidationException.class).to("mock:invalid");
from("direct:start").bean("myBean").to("mock:valid");
}
};
}
public static
|
BeanWithExceptionTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterTimeSeriesIT.java
|
{
"start": 1980,
"end": 18097
}
|
class ____ extends ESRestTestCase {
static final List<String> REQUIRED_CAPABILITIES = List.of("ts_command_v0");
static ElasticsearchCluster remoteCluster = Clusters.remoteCluster();
static ElasticsearchCluster localCluster = Clusters.localCluster(remoteCluster);
@ClassRule
public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster);
@Rule(order = Integer.MIN_VALUE)
public ProfileLogger profileLogger = new ProfileLogger();
@Override
protected String getTestRestCluster() {
return localCluster.getHttpAddresses();
}
record TimeSeriesDoc(
String host,
String cluster,
long timestamp,
long requestCount,
long cpu,
ByteSizeValue memory,
String clusterTag
) {}
final String localIndex = "hosts-local";
List<TimeSeriesDoc> localDocs = List.of();
final String remoteIndex = "hosts-remote";
List<TimeSeriesDoc> remoteDocs = List.of();
final String allDataIndex = "all-data";
private Boolean shouldCheckShardCounts = null;
@Before
public void setUpTimeSeriesIndices() throws Exception {
localDocs = getRandomDocs("local");
RestClient localClient = client();
createTimeSeriesIndex(localClient, localIndex);
indexTimeSeriesDocs(localClient, localIndex, localDocs);
remoteDocs = getRandomDocs("remote");
try (RestClient remoteClient = remoteClusterClient()) {
createTimeSeriesIndex(remoteClient, remoteIndex);
indexTimeSeriesDocs(remoteClient, remoteIndex, remoteDocs);
}
createTimeSeriesIndex(localClient, allDataIndex);
indexTimeSeriesDocs(localClient, allDataIndex, Stream.concat(localDocs.stream(), remoteDocs.stream()).toList());
}
@After
public void wipeIndices() throws Exception {
try (RestClient remoteClient = remoteClusterClient()) {
deleteIndex(remoteClient, remoteIndex);
}
}
private List<TimeSeriesDoc> getRandomDocs(String clusterTag) {
final List<TimeSeriesDoc> docs = new ArrayList<>();
Map<String, String> hostToClusters = new HashMap<>();
for (int i = 0; i < 5; i++) {
hostToClusters.put(clusterTag + "0" + i, randomFrom("qa", "prod"));
}
long timestamp = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-04-15T00:00:00Z");
Map<String, Integer> requestCounts = new HashMap<>();
int numDocs = between(20, 100);
for (int i = 0; i < numDocs; i++) {
List<String> hosts = randomSubsetOf(between(1, hostToClusters.size()), hostToClusters.keySet());
timestamp += between(1, 30) * 1000L;
for (String host : hosts) {
var requestCount = requestCounts.compute(host, (k, curr) -> {
if (curr == null || randomInt(100) <= 20) {
return randomIntBetween(0, 10);
} else {
return curr + randomIntBetween(1, 10);
}
});
int cpu = randomIntBetween(0, 100);
ByteSizeValue memory = ByteSizeValue.ofBytes(randomIntBetween(1024, 1024 * 1024));
docs.add(new TimeSeriesDoc(host, hostToClusters.get(host), timestamp, requestCount, cpu, memory, clusterTag));
}
}
Randomness.shuffle(docs);
return docs;
}
public void testAvg() throws Exception {
assumeTrue("TS command not supported", capabilitiesSupportedNewAndOld(REQUIRED_CAPABILITIES));
boolean includeCCSMetadata = includeCCSMetadata();
Map<String, Object> multiClusterResult = run("""
TS hosts-local,*:hosts-remote
| STATS avg_cpu = AVG(cpu), avg_memory = AVG(memory) BY cluster
| SORT cluster
""", includeCCSMetadata);
Map<String, Object> singleClusterResult = run(
"TS all-data | STATS avg_cpu = AVG(cpu), avg_memory = AVG(memory) BY cluster | SORT cluster",
includeCCSMetadata
);
assertResultMap(includeCCSMetadata, multiClusterResult, singleClusterResult);
}
public void testRateAndTBucket() throws Exception {
assumeTrue("TS command not supported", capabilitiesSupportedNewAndOld(REQUIRED_CAPABILITIES));
boolean includeCCSMetadata = includeCCSMetadata();
Map<String, Object> multiClusterResult = run("""
TS hosts-local,*:hosts-remote
| WHERE cluster == "prod"
| STATS max_rate = MAX(RATE(request_count)) BY tb = TBUCKET(5minute)
| SORT tb""", includeCCSMetadata);
Map<String, Object> singleClusterResult = run("""
TS all-data
| WHERE cluster == "prod"
| STATS max_rate = MAX(RATE(request_count)) BY tb = TBUCKET(5minute)
| SORT tb""", includeCCSMetadata);
assertResultMap(includeCCSMetadata, multiClusterResult, singleClusterResult);
}
public void testAvgOverTime() throws Exception {
assumeTrue("TS command not supported", capabilitiesSupportedNewAndOld(REQUIRED_CAPABILITIES));
boolean includeCCSMetadata = includeCCSMetadata();
Map<String, Object> multiClusterResult = run("""
TS hosts-local,*:hosts-remote
| STATS avg_cpu = SUM(AVG_OVER_TIME(cpu)), max_memory = SUM(MAX_OVER_TIME(memory)) BY tb = TBUCKET(10minutes)
| SORT tb""", includeCCSMetadata);
Map<String, Object> singleClusterResult = run("""
TS all-data
| STATS avg_cpu = SUM(AVG_OVER_TIME(cpu)), max_memory = SUM(MAX_OVER_TIME(memory)) BY tb = TBUCKET(10minutes)
| SORT tb""", includeCCSMetadata);
assertResultMap(includeCCSMetadata, multiClusterResult, singleClusterResult);
}
public void testIRate() throws Exception {
assumeTrue("TS command not supported", capabilitiesSupportedNewAndOld(REQUIRED_CAPABILITIES));
boolean includeCCSMetadata = includeCCSMetadata();
Map<String, Object> multiClusterResult = run("""
TS hosts-local,*:hosts-remote
| STATS irate_req_count = AVG(IRATE(request_count)) BY tb = TBUCKET(1minute)
| SORT tb""", includeCCSMetadata);
Map<String, Object> singleClusterResult = run("""
TS all-data
| STATS irate_req_count = AVG(IRATE(request_count)) BY tb = TBUCKET(1minute)
| SORT tb""", includeCCSMetadata);
assertResultMap(includeCCSMetadata, multiClusterResult, singleClusterResult);
}
private void createTimeSeriesIndex(RestClient client, String indexName) throws IOException {
Request createIndex = new Request("PUT", "/" + indexName);
String settings = Settings.builder()
.put("index.mode", "time_series")
.putList("index.routing_path", List.of("host", "cluster"))
.put("index.number_of_shards", randomIntBetween(1, 5))
.put("index.time_series.start_time", "2024-04-14T00:00:00Z")
.put("index.time_series.end_time", "2024-04-16T00:00:00Z")
.build()
.toString();
final String mapping = """
"properties": {
"@timestamp": { "type": "date" },
"host": { "type": "keyword", "time_series_dimension": true },
"cluster": { "type": "keyword", "time_series_dimension": true },
"cpu": { "type": "long", "time_series_metric": "gauge" },
"memory": { "type": "long", "time_series_metric": "gauge" },
"request_count": { "type": "long", "time_series_metric": "counter" },
"cluster_tag": { "type": "keyword" }
}
""";
createIndex.setJsonEntity(Strings.format("""
{
"settings": %s,
"mappings": {
%s
}
}
""", settings, mapping));
assertOK(client.performRequest(createIndex));
}
private void indexTimeSeriesDocs(RestClient client, String index, List<TimeSeriesDoc> docs) throws IOException {
logger.info("--> indexing {} time series docs to index {}", docs.size(), index);
for (TimeSeriesDoc doc : docs) {
Request createDoc = new Request("POST", "/" + index + "/_doc");
createDoc.addParameter("refresh", "true");
createDoc.setJsonEntity(Strings.format("""
{
"@timestamp": %d,
"host": "%s",
"cluster": "%s",
"cpu": %d,
"memory": %d,
"request_count": %d,
"cluster_tag": "%s"
}
""", doc.timestamp, doc.host, doc.cluster, doc.cpu, doc.memory.getBytes(), doc.requestCount, doc.clusterTag));
assertOK(client.performRequest(createDoc));
}
refresh(client, index);
}
private Map<String, Object> run(String query, boolean includeCCSMetadata) throws IOException {
var queryBuilder = new RestEsqlTestCase.RequestObjectBuilder().query(query).profile(true);
if (includeCCSMetadata) {
queryBuilder.includeCCSMetadata(true);
}
Map<String, Object> resp = runEsql(queryBuilder.build());
logger.info("--> query {} response {}", queryBuilder, resp);
return resp;
}
private Map<String, Object> runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException {
return RestEsqlTestCase.runEsqlSync(requestObject, new AssertWarnings.NoWarnings(), profileLogger);
}
/**
 * Lazily determines (and caches) whether both clusters support accurate
 * skipped-shard accounting; a capability-probe failure counts as "no".
 */
private boolean checkShardCounts() {
if (shouldCheckShardCounts == null) {
boolean supported;
try {
supported = capabilitiesSupportedNewAndOld(List.of("correct_skipped_shard_count"));
} catch (IOException e) {
// Probe failed — skip the strict shard-count assertions.
supported = false;
}
shouldCheckShardCounts = supported;
}
return shouldCheckShardCounts;
}
private boolean capabilitiesSupportedNewAndOld(List<String> requiredCapabilities) throws IOException {
boolean isSupported = clusterHasCapability("POST", "/_query", List.of(), requiredCapabilities).orElse(false);
try (RestClient remoteClient = remoteClusterClient()) {
isSupported = isSupported
&& clusterHasCapability(remoteClient, "POST", "/_query", List.of(), requiredCapabilities).orElse(false);
}
return isSupported;
}
private void assertResultMap(boolean includeCCSMetadata, Map<String, Object> result, Map<String, Object> expectedResult) {
MapMatcher mapMatcher = getResultMatcher(
result.containsKey("is_partial"),
result.containsKey("documents_found"),
result.containsKey("start_time_in_millis")
).extraOk();
if (includeCCSMetadata) {
mapMatcher = mapMatcher.entry("_clusters", any(Map.class));
}
assertMap(
result,
mapMatcher.entry("columns", expectedResult.get("columns")).entry("values", matcherFor(expectedResult.get("values")))
);
if (includeCCSMetadata) {
assertClusterDetailsMap(result);
}
}
/**
* Converts an unknown {@link Object} to an equality {@link Matcher}
* for the public API methods that take {@linkplain Object}.
* <br/>
* This is a copy of org.elasticsearch.test.MapMatcher#matcherFor(java.lang.Object) to add support for Double values comparison
* with a given error.
*/
private static Matcher<?> matcherFor(Object value) {
// Order matters: null must be handled first; an existing Matcher is
// passed through unchanged (making this method idempotent); doubles are
// compared with a small tolerance instead of exact equality.
return switch (value) {
case null -> nullValue();
case List<?> list -> matchesList(list);
case Map<?, ?> map -> matchesMap(map);
case Matcher<?> matcher -> matcher;
case Double doubleValue -> Matchers.closeTo(doubleValue, 0.0000001);
default -> equalTo(value);
};
}
/**
 * Builds a {@link ListMatcher} that matches each element of {@code list}
 * via {@link #matcherFor(Object)} (tolerant double comparison, recursive
 * list/map matching).
 */
public static ListMatcher matchesList(List<?> list) {
ListMatcher matcher = ListMatcher.matchesList();
for (Object item : list) {
// Previously wrapped twice — matcherFor(matcherFor(item)). Since
// matcherFor passes Matcher instances through, the double call was a
// redundant no-op; one conversion per element is sufficient and clear.
matcher = matcher.item(matcherFor(item));
}
return matcher;
}
/**
 * Verifies the {@code _clusters} CCS-metadata section for a two-cluster
 * (local + remote) query: top-level counters, per-cluster detail entries,
 * and — when supported — exact shard accounting.
 */
private void assertClusterDetailsMap(Map<String, Object> result) {
@SuppressWarnings("unchecked")
Map<String, Object> clusters = (Map<String, Object>) result.get("_clusters");
assertThat(clusters.size(), equalTo(7));
assertThat(clusters.keySet(), equalTo(Set.of("total", "successful", "running", "skipped", "partial", "failed", "details")));
int expectedNumClusters = 2;
Set<String> expectedClusterAliases = Set.of("remote_cluster", "(local)");
// Both clusters must have completed successfully with nothing skipped/failed.
assertThat(clusters.get("total"), equalTo(expectedNumClusters));
assertThat(clusters.get("successful"), equalTo(expectedNumClusters));
assertThat(clusters.get("running"), equalTo(0));
assertThat(clusters.get("skipped"), equalTo(0));
assertThat(clusters.get("partial"), equalTo(0));
assertThat(clusters.get("failed"), equalTo(0));
@SuppressWarnings("unchecked")
Map<String, Object> details = (Map<String, Object>) clusters.get("details");
assertThat(details.keySet(), equalTo(expectedClusterAliases));
@SuppressWarnings("unchecked")
Map<String, Object> remoteCluster = (Map<String, Object>) details.get("remote_cluster");
assertThat(remoteCluster.keySet(), equalTo(Set.of("status", "indices", "took", "_shards")));
assertThat(remoteCluster.get("status"), equalTo("successful"));
assertThat(remoteCluster.get("indices"), equalTo("hosts-remote"));
assertThat((Integer) remoteCluster.get("took"), greaterThanOrEqualTo(0));
@SuppressWarnings("unchecked")
Map<String, Object> remoteClusterShards = (Map<String, Object>) remoteCluster.get("_shards");
assertThat(
remoteClusterShards,
matchesMap().entry("total", greaterThanOrEqualTo(0))
.entry("successful", greaterThanOrEqualTo(0))
.entry("skipped", greaterThanOrEqualTo(0))
.entry("failed", 0)
);
if (checkShardCounts()) {
// successful + skipped must account for every shard.
assertThat(
(int) remoteClusterShards.get("successful") + (int) remoteClusterShards.get("skipped"),
equalTo(remoteClusterShards.get("total"))
);
}
// The former 'if (false == false)' guard around the local-cluster checks was
// a constant-true no-op; run these assertions unconditionally.
@SuppressWarnings("unchecked")
Map<String, Object> localCluster = (Map<String, Object>) details.get("(local)");
assertThat(localCluster.keySet(), equalTo(Set.of("status", "indices", "took", "_shards")));
assertThat(localCluster.get("status"), equalTo("successful"));
assertThat(localCluster.get("indices"), equalTo("hosts-local"));
assertThat((Integer) localCluster.get("took"), greaterThanOrEqualTo(0));
@SuppressWarnings("unchecked")
Map<String, Object> localClusterShards = (Map<String, Object>) localCluster.get("_shards");
assertThat(
localClusterShards,
matchesMap().entry("total", greaterThanOrEqualTo(0))
.entry("successful", greaterThanOrEqualTo(0))
.entry("skipped", greaterThanOrEqualTo(0))
.entry("failed", 0)
);
if (checkShardCounts()) {
assertThat(
(int) localClusterShards.get("successful") + (int) localClusterShards.get("skipped"),
equalTo(localClusterShards.get("total"))
);
}
}
// Builds a fresh low-level REST client pointed at the remote cluster.
// Callers are responsible for closing it (see try-with-resources usages).
private RestClient remoteClusterClient() throws IOException {
var clusterHosts = parseClusterHosts(remoteCluster.getHttpAddresses());
return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0]));
}
// Randomly toggles the include_ccs_metadata request flag so both response
// shapes (with and without the _clusters section) get test coverage.
private static boolean includeCCSMetadata() {
return randomBoolean();
}
}
|
MultiClusterTimeSeriesIT
|
java
|
processing__processing4
|
core/src/processing/opengl/PJOGL.java
|
{
"start": 18787,
"end": 56600
}
|
class ____ implements PGL.FontOutline {
PathIterator iter;
public FontOutline(char ch, Font font) {
char[] textArray = new char[] { ch };
FontRenderContext frc = getFontRenderContext(font);
GlyphVector gv = font.createGlyphVector(frc, textArray);
Shape shp = gv.getOutline();
iter = shp.getPathIterator(null);
}
public boolean isDone() {
return iter.isDone();
}
public int currentSegment(float[] coords) {
return iter.currentSegment(coords);
}
public void next() {
iter.next();
}
}
///////////////////////////////////////////////////////////
// Constants
static {
FALSE = GL.GL_FALSE;
TRUE = GL.GL_TRUE;
INT = GL2ES2.GL_INT;
BYTE = GL.GL_BYTE;
SHORT = GL.GL_SHORT;
FLOAT = GL.GL_FLOAT;
BOOL = GL2ES2.GL_BOOL;
UNSIGNED_INT = GL.GL_UNSIGNED_INT;
UNSIGNED_BYTE = GL.GL_UNSIGNED_BYTE;
UNSIGNED_SHORT = GL.GL_UNSIGNED_SHORT;
RGB = GL.GL_RGB;
RGBA = GL.GL_RGBA;
ALPHA = GL.GL_ALPHA;
LUMINANCE = GL.GL_LUMINANCE;
LUMINANCE_ALPHA = GL.GL_LUMINANCE_ALPHA;
UNSIGNED_SHORT_5_6_5 = GL.GL_UNSIGNED_SHORT_5_6_5;
UNSIGNED_SHORT_4_4_4_4 = GL.GL_UNSIGNED_SHORT_4_4_4_4;
UNSIGNED_SHORT_5_5_5_1 = GL.GL_UNSIGNED_SHORT_5_5_5_1;
RGBA4 = GL.GL_RGBA4;
RGB5_A1 = GL.GL_RGB5_A1;
RGB565 = GL.GL_RGB565;
RGB8 = GL.GL_RGB8;
RGBA8 = GL.GL_RGBA8;
ALPHA8 = GL.GL_ALPHA8;
READ_ONLY = GL2ES3.GL_READ_ONLY;
WRITE_ONLY = GL.GL_WRITE_ONLY;
READ_WRITE = GL2ES3.GL_READ_WRITE;
TESS_WINDING_NONZERO = GLU.GLU_TESS_WINDING_NONZERO;
TESS_WINDING_ODD = GLU.GLU_TESS_WINDING_ODD;
TESS_EDGE_FLAG = GLU.GLU_TESS_EDGE_FLAG;
GENERATE_MIPMAP_HINT = GL.GL_GENERATE_MIPMAP_HINT;
FASTEST = GL.GL_FASTEST;
NICEST = GL.GL_NICEST;
DONT_CARE = GL.GL_DONT_CARE;
VENDOR = GL.GL_VENDOR;
RENDERER = GL.GL_RENDERER;
VERSION = GL.GL_VERSION;
EXTENSIONS = GL.GL_EXTENSIONS;
SHADING_LANGUAGE_VERSION = GL2ES2.GL_SHADING_LANGUAGE_VERSION;
MAX_SAMPLES = GL.GL_MAX_SAMPLES;
SAMPLES = GL.GL_SAMPLES;
ALIASED_LINE_WIDTH_RANGE = GL.GL_ALIASED_LINE_WIDTH_RANGE;
ALIASED_POINT_SIZE_RANGE = GL.GL_ALIASED_POINT_SIZE_RANGE;
DEPTH_BITS = GL.GL_DEPTH_BITS;
STENCIL_BITS = GL.GL_STENCIL_BITS;
CCW = GL.GL_CCW;
CW = GL.GL_CW;
VIEWPORT = GL.GL_VIEWPORT;
ARRAY_BUFFER = GL.GL_ARRAY_BUFFER;
ELEMENT_ARRAY_BUFFER = GL.GL_ELEMENT_ARRAY_BUFFER;
PIXEL_PACK_BUFFER = GL2ES3.GL_PIXEL_PACK_BUFFER;
MAX_VERTEX_ATTRIBS = GL2ES2.GL_MAX_VERTEX_ATTRIBS;
STATIC_DRAW = GL.GL_STATIC_DRAW;
DYNAMIC_DRAW = GL.GL_DYNAMIC_DRAW;
STREAM_DRAW = GL2ES2.GL_STREAM_DRAW;
STREAM_READ = GL2ES3.GL_STREAM_READ;
BUFFER_SIZE = GL.GL_BUFFER_SIZE;
BUFFER_USAGE = GL.GL_BUFFER_USAGE;
POINTS = GL.GL_POINTS;
LINE_STRIP = GL.GL_LINE_STRIP;
LINE_LOOP = GL.GL_LINE_LOOP;
LINES = GL.GL_LINES;
TRIANGLE_FAN = GL.GL_TRIANGLE_FAN;
TRIANGLE_STRIP = GL.GL_TRIANGLE_STRIP;
TRIANGLES = GL.GL_TRIANGLES;
CULL_FACE = GL.GL_CULL_FACE;
FRONT = GL.GL_FRONT;
BACK = GL.GL_BACK;
FRONT_AND_BACK = GL.GL_FRONT_AND_BACK;
POLYGON_OFFSET_FILL = GL.GL_POLYGON_OFFSET_FILL;
UNPACK_ALIGNMENT = GL.GL_UNPACK_ALIGNMENT;
PACK_ALIGNMENT = GL.GL_PACK_ALIGNMENT;
TEXTURE_2D = GL.GL_TEXTURE_2D;
TEXTURE_RECTANGLE = GL2GL3.GL_TEXTURE_RECTANGLE;
TEXTURE_BINDING_2D = GL.GL_TEXTURE_BINDING_2D;
TEXTURE_BINDING_RECTANGLE = GL2GL3.GL_TEXTURE_BINDING_RECTANGLE;
MAX_TEXTURE_SIZE = GL.GL_MAX_TEXTURE_SIZE;
TEXTURE_MAX_ANISOTROPY = GL.GL_TEXTURE_MAX_ANISOTROPY_EXT;
MAX_TEXTURE_MAX_ANISOTROPY = GL.GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT;
MAX_VERTEX_TEXTURE_IMAGE_UNITS = GL2ES2.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS;
MAX_TEXTURE_IMAGE_UNITS = GL2ES2.GL_MAX_TEXTURE_IMAGE_UNITS;
MAX_COMBINED_TEXTURE_IMAGE_UNITS = GL2ES2.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS;
NUM_COMPRESSED_TEXTURE_FORMATS = GL.GL_NUM_COMPRESSED_TEXTURE_FORMATS;
COMPRESSED_TEXTURE_FORMATS = GL.GL_COMPRESSED_TEXTURE_FORMATS;
NEAREST = GL.GL_NEAREST;
LINEAR = GL.GL_LINEAR;
LINEAR_MIPMAP_NEAREST = GL.GL_LINEAR_MIPMAP_NEAREST;
LINEAR_MIPMAP_LINEAR = GL.GL_LINEAR_MIPMAP_LINEAR;
CLAMP_TO_EDGE = GL.GL_CLAMP_TO_EDGE;
REPEAT = GL.GL_REPEAT;
TEXTURE0 = GL.GL_TEXTURE0;
TEXTURE1 = GL.GL_TEXTURE1;
TEXTURE2 = GL.GL_TEXTURE2;
TEXTURE3 = GL.GL_TEXTURE3;
TEXTURE_MIN_FILTER = GL.GL_TEXTURE_MIN_FILTER;
TEXTURE_MAG_FILTER = GL.GL_TEXTURE_MAG_FILTER;
TEXTURE_WRAP_S = GL.GL_TEXTURE_WRAP_S;
TEXTURE_WRAP_T = GL.GL_TEXTURE_WRAP_T;
TEXTURE_WRAP_R = GL2ES2.GL_TEXTURE_WRAP_R;
TEXTURE_CUBE_MAP = GL.GL_TEXTURE_CUBE_MAP;
TEXTURE_CUBE_MAP_POSITIVE_X = GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X;
TEXTURE_CUBE_MAP_POSITIVE_Y = GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Y;
TEXTURE_CUBE_MAP_POSITIVE_Z = GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Z;
TEXTURE_CUBE_MAP_NEGATIVE_X = GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_X;
TEXTURE_CUBE_MAP_NEGATIVE_Y = GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y;
TEXTURE_CUBE_MAP_NEGATIVE_Z = GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z;
VERTEX_SHADER = GL2ES2.GL_VERTEX_SHADER;
FRAGMENT_SHADER = GL2ES2.GL_FRAGMENT_SHADER;
INFO_LOG_LENGTH = GL2ES2.GL_INFO_LOG_LENGTH;
SHADER_SOURCE_LENGTH = GL2ES2.GL_SHADER_SOURCE_LENGTH;
COMPILE_STATUS = GL2ES2.GL_COMPILE_STATUS;
LINK_STATUS = GL2ES2.GL_LINK_STATUS;
VALIDATE_STATUS = GL2ES2.GL_VALIDATE_STATUS;
SHADER_TYPE = GL2ES2.GL_SHADER_TYPE;
DELETE_STATUS = GL2ES2.GL_DELETE_STATUS;
FLOAT_VEC2 = GL2ES2.GL_FLOAT_VEC2;
FLOAT_VEC3 = GL2ES2.GL_FLOAT_VEC3;
FLOAT_VEC4 = GL2ES2.GL_FLOAT_VEC4;
FLOAT_MAT2 = GL2ES2.GL_FLOAT_MAT2;
FLOAT_MAT3 = GL2ES2.GL_FLOAT_MAT3;
FLOAT_MAT4 = GL2ES2.GL_FLOAT_MAT4;
INT_VEC2 = GL2ES2.GL_INT_VEC2;
INT_VEC3 = GL2ES2.GL_INT_VEC3;
INT_VEC4 = GL2ES2.GL_INT_VEC4;
BOOL_VEC2 = GL2ES2.GL_BOOL_VEC2;
BOOL_VEC3 = GL2ES2.GL_BOOL_VEC3;
BOOL_VEC4 = GL2ES2.GL_BOOL_VEC4;
SAMPLER_2D = GL2ES2.GL_SAMPLER_2D;
SAMPLER_CUBE = GL2ES2.GL_SAMPLER_CUBE;
LOW_FLOAT = GL2ES2.GL_LOW_FLOAT;
MEDIUM_FLOAT = GL2ES2.GL_MEDIUM_FLOAT;
HIGH_FLOAT = GL2ES2.GL_HIGH_FLOAT;
LOW_INT = GL2ES2.GL_LOW_INT;
MEDIUM_INT = GL2ES2.GL_MEDIUM_INT;
HIGH_INT = GL2ES2.GL_HIGH_INT;
CURRENT_VERTEX_ATTRIB = GL2ES2.GL_CURRENT_VERTEX_ATTRIB;
VERTEX_ATTRIB_ARRAY_BUFFER_BINDING = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING;
VERTEX_ATTRIB_ARRAY_ENABLED = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_ENABLED;
VERTEX_ATTRIB_ARRAY_SIZE = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_SIZE;
VERTEX_ATTRIB_ARRAY_STRIDE = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_STRIDE;
VERTEX_ATTRIB_ARRAY_TYPE = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_TYPE;
VERTEX_ATTRIB_ARRAY_NORMALIZED = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_NORMALIZED;
VERTEX_ATTRIB_ARRAY_POINTER = GL2ES2.GL_VERTEX_ATTRIB_ARRAY_POINTER;
BLEND = GL.GL_BLEND;
ONE = GL.GL_ONE;
ZERO = GL.GL_ZERO;
SRC_ALPHA = GL.GL_SRC_ALPHA;
DST_ALPHA = GL.GL_DST_ALPHA;
ONE_MINUS_SRC_ALPHA = GL.GL_ONE_MINUS_SRC_ALPHA;
ONE_MINUS_DST_COLOR = GL.GL_ONE_MINUS_DST_COLOR;
ONE_MINUS_SRC_COLOR = GL.GL_ONE_MINUS_SRC_COLOR;
DST_COLOR = GL.GL_DST_COLOR;
SRC_COLOR = GL.GL_SRC_COLOR;
SAMPLE_ALPHA_TO_COVERAGE = GL.GL_SAMPLE_ALPHA_TO_COVERAGE;
SAMPLE_COVERAGE = GL.GL_SAMPLE_COVERAGE;
KEEP = GL.GL_KEEP;
REPLACE = GL.GL_REPLACE;
INCR = GL.GL_INCR;
DECR = GL.GL_DECR;
INVERT = GL.GL_INVERT;
INCR_WRAP = GL.GL_INCR_WRAP;
DECR_WRAP = GL.GL_DECR_WRAP;
NEVER = GL.GL_NEVER;
ALWAYS = GL.GL_ALWAYS;
EQUAL = GL.GL_EQUAL;
LESS = GL.GL_LESS;
LEQUAL = GL.GL_LEQUAL;
GREATER = GL.GL_GREATER;
GEQUAL = GL.GL_GEQUAL;
NOTEQUAL = GL.GL_NOTEQUAL;
FUNC_ADD = GL.GL_FUNC_ADD;
FUNC_MIN = GL2ES3.GL_MIN;
FUNC_MAX = GL2ES3.GL_MAX;
FUNC_REVERSE_SUBTRACT = GL.GL_FUNC_REVERSE_SUBTRACT;
FUNC_SUBTRACT = GL.GL_FUNC_SUBTRACT;
DITHER = GL.GL_DITHER;
CONSTANT_COLOR = GL2ES2.GL_CONSTANT_COLOR;
CONSTANT_ALPHA = GL2ES2.GL_CONSTANT_ALPHA;
ONE_MINUS_CONSTANT_COLOR = GL2ES2.GL_ONE_MINUS_CONSTANT_COLOR;
ONE_MINUS_CONSTANT_ALPHA = GL2ES2.GL_ONE_MINUS_CONSTANT_ALPHA;
SRC_ALPHA_SATURATE = GL.GL_SRC_ALPHA_SATURATE;
SCISSOR_TEST = GL.GL_SCISSOR_TEST;
STENCIL_TEST = GL.GL_STENCIL_TEST;
DEPTH_TEST = GL.GL_DEPTH_TEST;
DEPTH_WRITEMASK = GL.GL_DEPTH_WRITEMASK;
COLOR_BUFFER_BIT = GL.GL_COLOR_BUFFER_BIT;
DEPTH_BUFFER_BIT = GL.GL_DEPTH_BUFFER_BIT;
STENCIL_BUFFER_BIT = GL.GL_STENCIL_BUFFER_BIT;
FRAMEBUFFER = GL.GL_FRAMEBUFFER;
COLOR_ATTACHMENT0 = GL.GL_COLOR_ATTACHMENT0;
COLOR_ATTACHMENT1 = GL2ES2.GL_COLOR_ATTACHMENT1;
COLOR_ATTACHMENT2 = GL2ES2.GL_COLOR_ATTACHMENT2;
COLOR_ATTACHMENT3 = GL2ES2.GL_COLOR_ATTACHMENT3;
RENDERBUFFER = GL.GL_RENDERBUFFER;
DEPTH_ATTACHMENT = GL.GL_DEPTH_ATTACHMENT;
STENCIL_ATTACHMENT = GL.GL_STENCIL_ATTACHMENT;
READ_FRAMEBUFFER = GL.GL_READ_FRAMEBUFFER;
DRAW_FRAMEBUFFER = GL.GL_DRAW_FRAMEBUFFER;
DEPTH24_STENCIL8 = GL.GL_DEPTH24_STENCIL8;
DEPTH_COMPONENT = GL2ES2.GL_DEPTH_COMPONENT;
DEPTH_COMPONENT16 = GL.GL_DEPTH_COMPONENT16;
DEPTH_COMPONENT24 = GL.GL_DEPTH_COMPONENT24;
DEPTH_COMPONENT32 = GL.GL_DEPTH_COMPONENT32;
STENCIL_INDEX = GL2ES2.GL_STENCIL_INDEX;
STENCIL_INDEX1 = GL.GL_STENCIL_INDEX1;
STENCIL_INDEX4 = GL.GL_STENCIL_INDEX4;
STENCIL_INDEX8 = GL.GL_STENCIL_INDEX8;
DEPTH_STENCIL = GL.GL_DEPTH_STENCIL;
FRAMEBUFFER_COMPLETE = GL.GL_FRAMEBUFFER_COMPLETE;
FRAMEBUFFER_UNDEFINED = GL2ES3.GL_FRAMEBUFFER_UNDEFINED;
FRAMEBUFFER_INCOMPLETE_ATTACHMENT = GL.GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT;
FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT = GL.GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT;
FRAMEBUFFER_INCOMPLETE_DIMENSIONS = GL.GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS;
FRAMEBUFFER_INCOMPLETE_FORMATS = GL.GL_FRAMEBUFFER_INCOMPLETE_FORMATS;
FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER = GL2GL3.GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER;
FRAMEBUFFER_INCOMPLETE_READ_BUFFER = GL2GL3.GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER;
FRAMEBUFFER_UNSUPPORTED = GL.GL_FRAMEBUFFER_UNSUPPORTED;
FRAMEBUFFER_INCOMPLETE_MULTISAMPLE = GL.GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE;
FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS = GL3ES3.GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS;
FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE = GL.GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE;
FRAMEBUFFER_ATTACHMENT_OBJECT_NAME = GL.GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME;
FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL = GL.GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL;
FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE = GL.GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE;
RENDERBUFFER_WIDTH = GL.GL_RENDERBUFFER_WIDTH;
RENDERBUFFER_HEIGHT = GL.GL_RENDERBUFFER_HEIGHT;
RENDERBUFFER_RED_SIZE = GL.GL_RENDERBUFFER_RED_SIZE;
RENDERBUFFER_GREEN_SIZE = GL.GL_RENDERBUFFER_GREEN_SIZE;
RENDERBUFFER_BLUE_SIZE = GL.GL_RENDERBUFFER_BLUE_SIZE;
RENDERBUFFER_ALPHA_SIZE = GL.GL_RENDERBUFFER_ALPHA_SIZE;
RENDERBUFFER_DEPTH_SIZE = GL.GL_RENDERBUFFER_DEPTH_SIZE;
RENDERBUFFER_STENCIL_SIZE = GL.GL_RENDERBUFFER_STENCIL_SIZE;
RENDERBUFFER_INTERNAL_FORMAT = GL.GL_RENDERBUFFER_INTERNAL_FORMAT;
MULTISAMPLE = GL.GL_MULTISAMPLE;
LINE_SMOOTH = GL.GL_LINE_SMOOTH;
POLYGON_SMOOTH = GL2GL3.GL_POLYGON_SMOOTH;
SYNC_GPU_COMMANDS_COMPLETE = GL3ES3.GL_SYNC_GPU_COMMANDS_COMPLETE;
ALREADY_SIGNALED = GL3ES3.GL_ALREADY_SIGNALED;
CONDITION_SATISFIED = GL3ES3.GL_CONDITION_SATISFIED;
}
///////////////////////////////////////////////////////////
// Special Functions
@Override
public void flush() {
gl.glFlush();
}
@Override
public void finish() {
gl.glFinish();
}
@Override
public void hint(int target, int hint) {
gl.glHint(target, hint);
}
///////////////////////////////////////////////////////////
// State and State Requests
@Override
public void enable(int value) {
if (-1 < value) {
gl.glEnable(value);
}
}
@Override
public void disable(int value) {
if (-1 < value) {
gl.glDisable(value);
}
}
@Override
public void getBooleanv(int value, IntBuffer data) {
if (-1 < value) {
if (byteBuffer.capacity() < data.capacity()) {
byteBuffer = allocateDirectByteBuffer(data.capacity());
}
gl.glGetBooleanv(value, byteBuffer);
for (int i = 0; i < data.capacity(); i++) {
data.put(i, byteBuffer.get(i));
}
} else {
fillIntBuffer(data, 0, data.capacity() - 1, 0);
}
}
@Override
public void getIntegerv(int value, IntBuffer data) {
if (-1 < value) {
gl.glGetIntegerv(value, data);
} else {
fillIntBuffer(data, 0, data.capacity() - 1, 0);
}
}
@Override
public void getFloatv(int value, FloatBuffer data) {
if (-1 < value) {
gl.glGetFloatv(value, data);
} else {
fillFloatBuffer(data, 0, data.capacity() - 1, 0);
}
}
@Override
public boolean isEnabled(int value) {
return gl.glIsEnabled(value);
}
@Override
public String getString(int name) {
return gl.glGetString(name);
}
///////////////////////////////////////////////////////////
// Error Handling
@Override
public int getError() {
return gl.glGetError();
}
@Override
public String errorString(int err) {
return glu.gluErrorString(err);
}
//////////////////////////////////////////////////////////////////////////////
// Buffer Objects
@Override
public void genBuffers(int n, IntBuffer buffers) {
gl.glGenBuffers(n, buffers);
}
@Override
public void deleteBuffers(int n, IntBuffer buffers) {
gl.glDeleteBuffers(n, buffers);
}
@Override
public void bindBuffer(int target, int buffer) {
gl.glBindBuffer(target, buffer);
}
@Override
public void bufferData(int target, int size, Buffer data, int usage) {
gl.glBufferData(target, size, data, usage);
}
@Override
public void bufferSubData(int target, int offset, int size, Buffer data) {
gl.glBufferSubData(target, offset, size, data);
}
@Override
public void isBuffer(int buffer) {
gl.glIsBuffer(buffer);
}
@Override
public void getBufferParameteriv(int target, int value, IntBuffer data) {
gl.glGetBufferParameteriv(target, value, data);
}
@Override
public ByteBuffer mapBuffer(int target, int access) {
return gl2.glMapBuffer(target, access);
}
@Override
public ByteBuffer mapBufferRange(int target, int offset, int length, int access) {
if (gl2x != null) {
return gl2x.glMapBufferRange(target, offset, length, access);
} else if (gl3 != null) {
return gl3.glMapBufferRange(target, offset, length, access);
} else {
throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "glMapBufferRange()"));
}
}
@Override
public void unmapBuffer(int target) {
gl2.glUnmapBuffer(target);
}
//////////////////////////////////////////////////////////////////////////////
// Synchronization
@Override
public long fenceSync(int condition, int flags) {
if (gl3es3 != null) {
return gl3es3.glFenceSync(condition, flags);
} else {
throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "fenceSync()"));
}
}
@Override
public void deleteSync(long sync) {
if (gl3es3 != null) {
gl3es3.glDeleteSync(sync);
} else {
throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "deleteSync()"));
}
}
@Override
public int clientWaitSync(long sync, int flags, long timeout) {
if (gl3es3 != null) {
return gl3es3.glClientWaitSync(sync, flags, timeout);
} else {
throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "clientWaitSync()"));
}
}
//////////////////////////////////////////////////////////////////////////////
// Viewport and Clipping
@Override
public void depthRangef(float n, float f) {
gl.glDepthRangef(n, f);
}
@Override
public void viewport(int x, int y, int w, int h) {
// Convert framework coordinates to device pixels before calling GL.
float scale = getPixelScale();
// Scale every argument uniformly. The previous code used '(int)scale * x',
// which cast scale to int BEFORE multiplying — inconsistent with the other
// three arguments and lossy for fractional pixel densities (e.g. 1.5).
viewportImpl((int)(scale * x), (int)(scale * y), (int)(scale * w), (int)(scale * h));
}
@Override
protected void viewportImpl(int x, int y, int w, int h) {
gl.glViewport(x, y, w, h);
}
//////////////////////////////////////////////////////////////////////////////
// Reading Pixels
@Override
protected void readPixelsImpl(int x, int y, int width, int height, int format, int type, Buffer buffer) {
// Direct read into a client-side buffer (no pixel-pack buffer involved).
gl.glReadPixels(x, y, width, height, format, type, buffer);
}
@Override
protected void readPixelsImpl(int x, int y, int width, int height, int format, int type, long offset) {
// PBO variant: with a pixel-pack buffer bound, the last argument is the
// byte offset into that buffer. The previous code passed a hard-coded 0,
// silently ignoring the caller-supplied 'offset' parameter.
gl.glReadPixels(x, y, width, height, format, type, offset);
}
//////////////////////////////////////////////////////////////////////////////
// Vertices
@Override
public void vertexAttrib1f(int index, float value) {
gl2.glVertexAttrib1f(index, value);
}
@Override
public void vertexAttrib2f(int index, float value0, float value1) {
gl2.glVertexAttrib2f(index, value0, value1);
}
@Override
public void vertexAttrib3f(int index, float value0, float value1, float value2) {
gl2.glVertexAttrib3f(index, value0, value1, value2);
}
@Override
public void vertexAttrib4f(int index, float value0, float value1, float value2, float value3) {
gl2.glVertexAttrib4f(index, value0, value1, value2, value3);
}
@Override
public void vertexAttrib1fv(int index, FloatBuffer values) {
gl2.glVertexAttrib1fv(index, values);
}
@Override
public void vertexAttrib2fv(int index, FloatBuffer values) {
gl2.glVertexAttrib2fv(index, values);
}
@Override
public void vertexAttrib3fv(int index, FloatBuffer values) {
gl2.glVertexAttrib3fv(index, values);
}
@Override
public void vertexAttrib4fv(int index, FloatBuffer values) {
gl2.glVertexAttrib4fv(index, values);
}
@Override
public void vertexAttribPointer(int index, int size, int type, boolean normalized, int stride, int offset) {
gl2.glVertexAttribPointer(index, size, type, normalized, stride, offset);
}
@Override
public void enableVertexAttribArray(int index) {
gl2.glEnableVertexAttribArray(index);
}
@Override
public void disableVertexAttribArray(int index) {
gl2.glDisableVertexAttribArray(index);
}
@Override
public void drawArraysImpl(int mode, int first, int count) {
gl.glDrawArrays(mode, first, count);
}
@Override
public void drawElementsImpl(int mode, int count, int type, int offset) {
gl.glDrawElements(mode, count, type, offset);
}
  //////////////////////////////////////////////////////////////////////////////
  // Rasterization
  // Direct delegations for fixed-function rasterizer state.
  @Override
  public void lineWidth(float width) {
    gl.glLineWidth(width);
  }
  @Override
  public void frontFace(int dir) {
    gl.glFrontFace(dir);
  }
  @Override
  public void cullFace(int mode) {
    gl.glCullFace(mode);
  }
  @Override
  public void polygonOffset(float factor, float units) {
    gl.glPolygonOffset(factor, units);
  }
  //////////////////////////////////////////////////////////////////////////////
  // Pixel Rectangles
  @Override
  public void pixelStorei(int pname, int param) {
    gl.glPixelStorei(pname, param);
  }
  ///////////////////////////////////////////////////////////
  // Texturing
  // Texture upload/copy delegations; Buffer arguments may be null to
  // allocate storage without initializing it, per GL semantics.
  @Override
  public void texImage2D(int target, int level, int internalFormat, int width, int height, int border, int format, int type, Buffer data) {
    gl.glTexImage2D(target, level, internalFormat, width, height, border, format, type, data);
  }
  @Override
  public void copyTexImage2D(int target, int level, int internalFormat, int x, int y, int width, int height, int border) {
    gl.glCopyTexImage2D(target, level, internalFormat, x, y, width, height, border);
  }
  @Override
  public void texSubImage2D(int target, int level, int xOffset, int yOffset, int width, int height, int format, int type, Buffer data) {
    gl.glTexSubImage2D(target, level, xOffset, yOffset, width, height, format, type, data);
  }
@Override
public void copyTexSubImage2D(int target, int level, int xOffset, int yOffset, int x, int y, int width, int height) {
gl.glCopyTexSubImage2D(target, level, x, y, xOffset, yOffset, width, height);
}
  @Override
  public void compressedTexImage2D(int target, int level, int internalFormat, int width, int height, int border, int imageSize, Buffer data) {
    gl.glCompressedTexImage2D(target, level, internalFormat, width, height, border, imageSize, data);
  }
  @Override
  public void compressedTexSubImage2D(int target, int level, int xOffset, int yOffset, int width, int height, int format, int imageSize, Buffer data) {
    gl.glCompressedTexSubImage2D(target, level, xOffset, yOffset, width, height, format, imageSize, data);
  }
  // Texture parameter setters/getters — straight delegations.
  @Override
  public void texParameteri(int target, int pname, int param) {
    gl.glTexParameteri(target, pname, param);
  }
  @Override
  public void texParameterf(int target, int pname, float param) {
    gl.glTexParameterf(target, pname, param);
  }
  @Override
  public void texParameteriv(int target, int pname, IntBuffer params) {
    gl.glTexParameteriv(target, pname, params);
  }
  @Override
  public void texParameterfv(int target, int pname, FloatBuffer params) {
    gl.glTexParameterfv(target, pname, params);
  }
  @Override
  public void generateMipmap(int target) {
    gl.glGenerateMipmap(target);
  }
  @Override
  public void genTextures(int n, IntBuffer textures) {
    gl.glGenTextures(n, textures);
  }
  @Override
  public void deleteTextures(int n, IntBuffer textures) {
    gl.glDeleteTextures(n, textures);
  }
  @Override
  public void getTexParameteriv(int target, int pname, IntBuffer params) {
    gl.glGetTexParameteriv(target, pname, params);
  }
  @Override
  public void getTexParameterfv(int target, int pname, FloatBuffer params) {
    gl.glGetTexParameterfv(target, pname, params);
  }
  @Override
  public boolean isTexture(int texture) {
    return gl.glIsTexture(texture);
  }
  // *Impl suffix: presumably wrapped with caching/bookkeeping in the
  // superclass — TODO confirm.
  @Override
  protected void activeTextureImpl(int texture) {
    gl.glActiveTexture(texture);
  }
  @Override
  protected void bindTextureImpl(int target, int texture) {
    gl.glBindTexture(target, texture);
  }
  ///////////////////////////////////////////////////////////
  // Shaders and Programs
  // Shader/program object lifecycle — all through the GL2ES2 profile.
  @Override
  public int createShader(int type) {
    return gl2.glCreateShader(type);
  }
  @Override
  public void shaderSource(int shader, String source) {
    // Uploads a single source string; the null length array lets JOGL
    // compute the string length itself.
    gl2.glShaderSource(shader, 1, new String[] { source }, null, 0);
  }
  @Override
  public void compileShader(int shader) {
    gl2.glCompileShader(shader);
  }
  @Override
  public void releaseShaderCompiler() {
    gl2.glReleaseShaderCompiler();
  }
  @Override
  public void deleteShader(int shader) {
    gl2.glDeleteShader(shader);
  }
  @Override
  public void shaderBinary(int count, IntBuffer shaders, int binaryFormat, Buffer binary, int length) {
    gl2.glShaderBinary(count, shaders, binaryFormat, binary, length);
  }
  @Override
  public int createProgram() {
    return gl2.glCreateProgram();
  }
  @Override
  public void attachShader(int program, int shader) {
    gl2.glAttachShader(program, shader);
  }
  @Override
  public void detachShader(int program, int shader) {
    gl2.glDetachShader(program, shader);
  }
  @Override
  public void linkProgram(int program) {
    gl2.glLinkProgram(program);
  }
  @Override
  public void useProgram(int program) {
    gl2.glUseProgram(program);
  }
  @Override
  public void deleteProgram(int program) {
    gl2.glDeleteProgram(program);
  }
@Override
public String getActiveAttrib(int program, int index, IntBuffer size, IntBuffer type) {
int[] tmp = {0, 0, 0};
byte[] namebuf = new byte[1024];
gl2.glGetActiveAttrib(program, index, 1024, tmp, 0, tmp, 1, tmp, 2, namebuf, 0);
size.put(tmp[1]);
type.put(tmp[2]);
return new String(namebuf, 0, tmp[0]);
}
  // Attribute/uniform location lookups — straight delegations.
  @Override
  public int getAttribLocation(int program, String name) {
    return gl2.glGetAttribLocation(program, name);
  }
  @Override
  public void bindAttribLocation(int program, int index, String name) {
    gl2.glBindAttribLocation(program, index, name);
  }
  @Override
  public int getUniformLocation(int program, String name) {
    return gl2.glGetUniformLocation(program, name);
  }
@Override
public String getActiveUniform(int program, int index, IntBuffer size, IntBuffer type) {
final int[] tmp = { 0, 0, 0 };
final byte[] namebuf = new byte[1024];
gl2.glGetActiveUniform(program, index, 1024, tmp, 0, tmp, 1, tmp, 2, namebuf, 0);
size.put(tmp[1]);
type.put(tmp[2]);
return new String(namebuf, 0, tmp[0]);
}
  // Uniform upload delegations (scalar, vector, array and matrix forms).
  @Override
  public void uniform1i(int location, int value) {
    gl2.glUniform1i(location, value);
  }
  @Override
  public void uniform2i(int location, int value0, int value1) {
    gl2.glUniform2i(location, value0, value1);
  }
  @Override
  public void uniform3i(int location, int value0, int value1, int value2) {
    gl2.glUniform3i(location, value0, value1, value2);
  }
  @Override
  public void uniform4i(int location, int value0, int value1, int value2, int value3) {
    gl2.glUniform4i(location, value0, value1, value2, value3);
  }
  @Override
  public void uniform1f(int location, float value) {
    gl2.glUniform1f(location, value);
  }
  @Override
  public void uniform2f(int location, float value0, float value1) {
    gl2.glUniform2f(location, value0, value1);
  }
  @Override
  public void uniform3f(int location, float value0, float value1, float value2) {
    gl2.glUniform3f(location, value0, value1, value2);
  }
  @Override
  public void uniform4f(int location, float value0, float value1, float value2, float value3) {
    gl2.glUniform4f(location, value0, value1, value2, value3);
  }
  @Override
  public void uniform1iv(int location, int count, IntBuffer v) {
    gl2.glUniform1iv(location, count, v);
  }
  @Override
  public void uniform2iv(int location, int count, IntBuffer v) {
    gl2.glUniform2iv(location, count, v);
  }
  @Override
  public void uniform3iv(int location, int count, IntBuffer v) {
    gl2.glUniform3iv(location, count, v);
  }
  @Override
  public void uniform4iv(int location, int count, IntBuffer v) {
    gl2.glUniform4iv(location, count, v);
  }
  @Override
  public void uniform1fv(int location, int count, FloatBuffer v) {
    gl2.glUniform1fv(location, count, v);
  }
  @Override
  public void uniform2fv(int location, int count, FloatBuffer v) {
    gl2.glUniform2fv(location, count, v);
  }
  @Override
  public void uniform3fv(int location, int count, FloatBuffer v) {
    gl2.glUniform3fv(location, count, v);
  }
  @Override
  public void uniform4fv(int location, int count, FloatBuffer v) {
    gl2.glUniform4fv(location, count, v);
  }
  @Override
  public void uniformMatrix2fv(int location, int count, boolean transpose, FloatBuffer mat) {
    gl2.glUniformMatrix2fv(location, count, transpose, mat);
  }
  @Override
  public void uniformMatrix3fv(int location, int count, boolean transpose, FloatBuffer mat) {
    gl2.glUniformMatrix3fv(location, count, transpose, mat);
  }
  @Override
  public void uniformMatrix4fv(int location, int count, boolean transpose, FloatBuffer mat) {
    gl2.glUniformMatrix4fv(location, count, transpose, mat);
  }
  @Override
  public void validateProgram(int program) {
    gl2.glValidateProgram(program);
  }
  @Override
  public boolean isShader(int shader) {
    return gl2.glIsShader(shader);
  }
  @Override
  public void getShaderiv(int shader, int pname, IntBuffer params) {
    gl2.glGetShaderiv(shader, pname, params);
  }
  @Override
  public void getAttachedShaders(int program, int maxCount, IntBuffer count, IntBuffer shaders) {
    gl2.glGetAttachedShaders(program, maxCount, count, shaders);
  }
@Override
public String getShaderInfoLog(int shader) {
int[] val = { 0 };
gl2.glGetShaderiv(shader, GL2ES2.GL_INFO_LOG_LENGTH, val, 0);
int length = val[0];
byte[] log = new byte[length];
gl2.glGetShaderInfoLog(shader, length, val, 0, log, 0);
return new String(log);
}
@Override
public String getShaderSource(int shader) {
int[] len = {0};
byte[] buf = new byte[1024];
gl2.glGetShaderSource(shader, 1024, len, 0, buf, 0);
return new String(buf, 0, len[0]);
}
  @Override
  public void getShaderPrecisionFormat(int shaderType, int precisionType, IntBuffer range, IntBuffer precision) {
    gl2.glGetShaderPrecisionFormat(shaderType, precisionType, range, precision);
  }
  @Override
  public void getVertexAttribfv(int index, int pname, FloatBuffer params) {
    gl2.glGetVertexAttribfv(index, pname, params);
  }
  @Override
  public void getVertexAttribiv(int index, int pname, IntBuffer params) {
    gl2.glGetVertexAttribiv(index, pname, params);
  }
  // Not exposed by the JOGL bindings in use, so this backend reports it as
  // unavailable instead of delegating.
  @Override
  public void getVertexAttribPointerv(int index, int pname, ByteBuffer data) {
    throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "glGetVertexAttribPointerv()"));
  }
  @Override
  public void getUniformfv(int program, int location, FloatBuffer params) {
    gl2.glGetUniformfv(program, location, params);
  }
  @Override
  public void getUniformiv(int program, int location, IntBuffer params) {
    gl2.glGetUniformiv(program, location, params);
  }
  @Override
  public boolean isProgram(int program) {
    return gl2.glIsProgram(program);
  }
  @Override
  public void getProgramiv(int program, int pname, IntBuffer params) {
    gl2.glGetProgramiv(program, pname, params);
  }
@Override
public String getProgramInfoLog(int program) {
int[] val = { 0 };
gl2.glGetProgramiv(program, GL2ES2.GL_INFO_LOG_LENGTH, val, 0);
int length = val[0];
if (0 < length) {
byte[] log = new byte[length];
gl2.glGetProgramInfoLog(program, length, val, 0, log, 0);
return new String(log);
} else {
return "Unknown error";
}
}
///////////////////////////////////////////////////////////
// Per-Fragment Operations
@Override
public void scissor(int x, int y, int w, int h) {
float scale = getPixelScale();
gl.glScissor((int)scale * x, (int)(scale * y), (int)(scale * w), (int)(scale * h));
// gl.glScissor(x, y, w, h);
}
  // Per-fragment test and blend state — straight delegations.
  @Override
  public void sampleCoverage(float value, boolean invert) {
    gl2.glSampleCoverage(value, invert);
  }
  @Override
  public void stencilFunc(int func, int ref, int mask) {
    gl2.glStencilFunc(func, ref, mask);
  }
  @Override
  public void stencilFuncSeparate(int face, int func, int ref, int mask) {
    gl2.glStencilFuncSeparate(face, func, ref, mask);
  }
  @Override
  public void stencilOp(int sfail, int dpfail, int dppass) {
    gl2.glStencilOp(sfail, dpfail, dppass);
  }
  @Override
  public void stencilOpSeparate(int face, int sfail, int dpfail, int dppass) {
    gl2.glStencilOpSeparate(face, sfail, dpfail, dppass);
  }
  @Override
  public void depthFunc(int func) {
    gl.glDepthFunc(func);
  }
  @Override
  public void blendEquation(int mode) {
    gl.glBlendEquation(mode);
  }
  @Override
  public void blendEquationSeparate(int modeRGB, int modeAlpha) {
    gl.glBlendEquationSeparate(modeRGB, modeAlpha);
  }
  @Override
  public void blendFunc(int src, int dst) {
    gl.glBlendFunc(src, dst);
  }
  @Override
  public void blendFuncSeparate(int srcRGB, int dstRGB, int srcAlpha, int dstAlpha) {
    gl.glBlendFuncSeparate(srcRGB, dstRGB, srcAlpha, dstAlpha);
  }
  @Override
  public void blendColor(float red, float green, float blue, float alpha) {
    gl2.glBlendColor(red, green, blue, alpha);
  }
  ///////////////////////////////////////////////////////////
  // Whole Framebuffer Operations
  // Write masks and clear state — straight delegations.
  @Override
  public void colorMask(boolean r, boolean g, boolean b, boolean a) {
    gl.glColorMask(r, g, b, a);
  }
  @Override
  public void depthMask(boolean mask) {
    gl.glDepthMask(mask);
  }
  @Override
  public void stencilMask(int mask) {
    gl.glStencilMask(mask);
  }
  @Override
  public void stencilMaskSeparate(int face, int mask) {
    gl2.glStencilMaskSeparate(face, mask);
  }
  @Override
  public void clearColor(float r, float g, float b, float a) {
    gl.glClearColor(r, g, b, a);
  }
  @Override
  public void clearDepth(float d) {
    gl.glClearDepth(d);
  }
  @Override
  public void clearStencil(int s) {
    gl.glClearStencil(s);
  }
  @Override
  public void clear(int buf) {
    gl.glClear(buf);
  }
  ///////////////////////////////////////////////////////////
  // Framebuffers Objects
  // FBO/renderbuffer management — straight delegations.
  @Override
  protected void bindFramebufferImpl(int target, int framebuffer) {
    gl.glBindFramebuffer(target, framebuffer);
  }
  @Override
  public void deleteFramebuffers(int n, IntBuffer framebuffers) {
    gl.glDeleteFramebuffers(n, framebuffers);
  }
  @Override
  public void genFramebuffers(int n, IntBuffer framebuffers) {
    gl.glGenFramebuffers(n, framebuffers);
  }
  @Override
  public void bindRenderbuffer(int target, int renderbuffer) {
    gl.glBindRenderbuffer(target, renderbuffer);
  }
  @Override
  public void deleteRenderbuffers(int n, IntBuffer renderbuffers) {
    gl.glDeleteRenderbuffers(n, renderbuffers);
  }
  @Override
  public void genRenderbuffers(int n, IntBuffer renderbuffers) {
    gl.glGenRenderbuffers(n, renderbuffers);
  }
  @Override
  public void renderbufferStorage(int target, int internalFormat, int width, int height) {
    gl.glRenderbufferStorage(target, internalFormat, width, height);
  }
  @Override
  public void framebufferRenderbuffer(int target, int attachment, int rbt, int renderbuffer) {
    gl.glFramebufferRenderbuffer(target, attachment, rbt, renderbuffer);
  }
  @Override
  public void framebufferTexture2D(int target, int attachment, int texTarget, int texture, int level) {
    gl.glFramebufferTexture2D(target, attachment, texTarget, texture, level);
  }
  @Override
  public int checkFramebufferStatus(int target) {
    return gl.glCheckFramebufferStatus(target);
  }
  @Override
  public boolean isFramebuffer(int framebuffer) {
    return gl2.glIsFramebuffer(framebuffer);
  }
  @Override
  public void getFramebufferAttachmentParameteriv(int target, int attachment, int name, IntBuffer params) {
    gl2.glGetFramebufferAttachmentParameteriv(target, attachment, name, params);
  }
  @Override
  public boolean isRenderbuffer(int renderbuffer) {
    return gl2.glIsRenderbuffer(renderbuffer);
  }
  @Override
  public void getRenderbufferParameteriv(int target, int name, IntBuffer params) {
    gl2.glGetRenderbufferParameteriv(target, name, params);
  }
  // The following entry points only exist on some GL profiles, so each one
  // tries the available profile objects in a fixed preference order
  // (gl2x, then gl3, then gl3es3) and reports the function as missing if
  // none of them is present. The order is semantic; do not reorder.
  @Override
  public void blitFramebuffer(int srcX0, int srcY0, int srcX1, int srcY1, int dstX0, int dstY0, int dstX1, int dstY1, int mask, int filter) {
    if (gl2x != null) {
      gl2x.glBlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter);
    } else if (gl3 != null) {
      gl3.glBlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter);
    } else if (gl3es3 != null) {
      gl3es3.glBlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter);
    } else {
      throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "glBlitFramebuffer()"));
    }
  }
  @Override
  public void renderbufferStorageMultisample(int target, int samples, int format, int width, int height) {
    if (gl2x != null) {
      gl2x.glRenderbufferStorageMultisample(target, samples, format, width, height);
    } else if (gl3 != null) {
      gl3.glRenderbufferStorageMultisample(target, samples, format, width, height);
    } else if (gl3es3 != null) {
      gl3es3.glRenderbufferStorageMultisample(target, samples, format, width, height);
    } else {
      throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "glRenderbufferStorageMultisample()"));
    }
  }
  @Override
  public void readBuffer(int buf) {
    if (gl2x != null) {
      gl2x.glReadBuffer(buf);
    } else if (gl3 != null) {
      gl3.glReadBuffer(buf);
    } else if (gl3es3 != null) {
      gl3es3.glReadBuffer(buf);
    } else {
      throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "glReadBuffer()"));
    }
  }
@Override
public void drawBuffer(int buf) {
if (gl2x != null) {
gl2x.glDrawBuffer(buf);
} else if (gl3 != null) {
gl3.glDrawBuffer(buf);
} else if (gl3es3 != null) {
IntBuffer intBuffer = IntBuffer.allocate(1);
intBuffer.put(buf);
intBuffer.rewind();
gl3es3.glDrawBuffers(1, intBuffer);
} else {
throw new RuntimeException(String.format(MISSING_GLFUNC_ERROR, "glDrawBuffer()"));
}
}
}
|
FontOutline
|
java
|
spring-projects__spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/simple/SimpleProperties.java
|
{
"start": 943,
"end": 1142
}
|
class ____ {
/**
* The name of this simple properties.
*/
private String theName = "boot";
// isFlag is also detected
/**
* A simple flag.
*/
private boolean flag;
// An
|
SimpleProperties
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/MappedSuperclassExtendsEntityTest.java
|
{
"start": 1283,
"end": 2067
}
|
class ____ {
@Test
@JiraKey(value = "HHH-12332")
public void testQueryingSingle(SessionFactoryScope scope) {
// Make sure that the produced query for th
scope.inTransaction(
s ->
s.createQuery(
"FROM TestEntity e JOIN e.parents p1 JOIN p1.entities JOIN p1.entities2 JOIN e.parents2 p2 JOIN p2.entities JOIN p2.entities2", Object[].class )
.getResultList()
);
}
@Test
@JiraKey(value = "HHH-12332")
public void testHql(SessionFactoryScope scope) {
// Make sure that the produced query for th
scope.inTransaction(
s ->
s.createQuery( "from TestEntity", TestEntity.class ).list()
);
}
@Entity(name = "GrandParent")
@Inheritance
@DiscriminatorColumn(name = "discriminator")
public static abstract
|
MappedSuperclassExtendsEntityTest
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/GaussDBOrdinalEnumJdbcType.java
|
{
"start": 777,
"end": 837
}
|
class ____ based on PostgreSQLOrdinalEnumJdbcType.
*/
public
|
is
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/Charlie.java
|
{
"start": 117,
"end": 190
}
|
class ____ {
public String ping() {
return "ok";
}
}
|
Charlie
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/web/servlet/embeddedcontainer/applicationcontext/MyDemoBean.java
|
{
"start": 1007,
"end": 1398
}
|
class ____ implements ApplicationListener<ApplicationStartedEvent> {
@SuppressWarnings("unused")
private ServletContext servletContext;
@Override
public void onApplicationEvent(ApplicationStartedEvent event) {
ApplicationContext applicationContext = event.getApplicationContext();
this.servletContext = ((WebApplicationContext) applicationContext).getServletContext();
}
}
|
MyDemoBean
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java
|
{
"start": 8640,
"end": 8731
}
|
class ____ extends AutoClass {}
}
""")
.doTest();
}
}
|
Extends
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-links/deployment/src/main/java/io/quarkus/resteasy/reactive/links/deployment/LinksProcessor.java
|
{
"start": 2391,
"end": 6503
}
|
class ____ {
private final GetterAccessorImplementor getterAccessorImplementor = new GetterAccessorImplementor();
@BuildStep
void feature(BuildProducer<FeatureBuildItem> feature) {
feature.produce(new FeatureBuildItem(Feature.REST_LINKS));
}
@BuildStep
MethodScannerBuildItem linksSupport() {
return new MethodScannerBuildItem(new LinksMethodScanner());
}
@BuildStep
@Record(STATIC_INIT)
void initializeLinksProvider(JaxRsResourceIndexBuildItem indexBuildItem,
ResteasyReactiveResourceMethodEntriesBuildItem resourceMethodEntriesBuildItem,
BuildProducer<BytecodeTransformerBuildItem> bytecodeTransformersProducer,
BuildProducer<GeneratedClassBuildItem> generatedClassesProducer,
GetterAccessorsContainerRecorder getterAccessorsContainerRecorder,
LinksProviderRecorder linksProviderRecorder) {
IndexView index = indexBuildItem.getIndexView();
ClassOutput classOutput = new GeneratedClassGizmoAdaptor(generatedClassesProducer, true);
// Initialize links container
LinksContainer linksContainer = getLinksContainer(resourceMethodEntriesBuildItem, index);
// Implement getters to access link path parameter values
RuntimeValue<GetterAccessorsContainer> getterAccessorsContainer = implementPathParameterValueGetters(
index, classOutput, linksContainer, getterAccessorsContainerRecorder, bytecodeTransformersProducer);
linksProviderRecorder.setGetterAccessorsContainer(getterAccessorsContainer);
linksProviderRecorder.setLinksContainer(linksContainer);
}
@BuildStep
AdditionalBeanBuildItem registerRestLinksProviderProducer() {
return AdditionalBeanBuildItem.unremovableOf(RestLinksProviderProducer.class);
}
@BuildStep
@Produce(ArtifactResultBuildItem.class)
void validateJsonNeededForHal(Capabilities capabilities,
ResteasyReactiveResourceMethodEntriesBuildItem resourceMethodEntriesBuildItem) {
boolean isHalSupported = capabilities.isPresent(Capability.HAL);
if (isHalSupported && isHalMediaTypeUsedInAnyResource(resourceMethodEntriesBuildItem.getEntries())) {
if (!capabilities.isPresent(Capability.RESTEASY_REACTIVE_JSON_JSONB) && !capabilities.isPresent(
Capability.RESTEASY_REACTIVE_JSON_JACKSON)) {
throw new IllegalStateException("Cannot generate HAL endpoints without "
+ "either 'quarkus-rest-jackson' or 'quarkus-rest-jsonb'");
}
}
}
@BuildStep
void addHalSupport(Capabilities capabilities,
BuildProducer<CustomContainerResponseFilterBuildItem> customResponseFilters,
BuildProducer<AdditionalBeanBuildItem> additionalBeans) {
boolean isHalSupported = capabilities.isPresent(Capability.HAL);
if (isHalSupported) {
customResponseFilters.produce(
new CustomContainerResponseFilterBuildItem(HalServerResponseFilter.class.getName()));
additionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(ResteasyReactiveHalService.class));
}
}
private boolean isHalMediaTypeUsedInAnyResource(List<ResteasyReactiveResourceMethodEntriesBuildItem.Entry> entries) {
for (ResteasyReactiveResourceMethodEntriesBuildItem.Entry entry : entries) {
for (String mediaType : entry.getResourceMethod().getProduces()) {
if (RestMediaType.APPLICATION_HAL_JSON.equals(mediaType)) {
return true;
}
}
}
return false;
}
private LinksContainer getLinksContainer(ResteasyReactiveResourceMethodEntriesBuildItem resourceMethodEntriesBuildItem,
IndexView index) {
LinksContainerFactory linksContainerFactory = new LinksContainerFactory();
return linksContainerFactory.getLinksContainer(resourceMethodEntriesBuildItem.getEntries(), index);
}
/**
* For each path parameter implement a getter method in a
|
LinksProcessor
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/OpenApiWithResteasyPathHttpRootDefaultPathTestCase.java
|
{
"start": 307,
"end": 1377
}
|
class ____ {
private static final String OPEN_API_PATH = "/q/openapi";
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(OpenApiResource.class, ResourceBean.class)
.addAsResource(new StringAsset("quarkus.http.root-path=/http-root-path\n" +
"quarkus.rest.path=/resteasy-path"),
"application.properties"));
@Test
public void testOpenApiResteasyPathHttpRootDefaultPath() {
RestAssured.given().queryParam("format", "JSON")
.when().get(OPEN_API_PATH)
.then()
.header("Content-Type", "application/json;charset=UTF-8")
.body("openapi", Matchers.startsWith("3.1"))
.body("info.title", Matchers.equalTo("quarkus-smallrye-openapi-deployment API"))
.body("paths", Matchers.hasKey("/http-root-path/resteasy-path/resource"));
}
}
|
OpenApiWithResteasyPathHttpRootDefaultPathTestCase
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java
|
{
"start": 592,
"end": 763
}
|
interface ____ {
/**
* A consistent id for the span. Should be structured "[short-name]-[unique-id]" ie "request-abc1234"
*/
String getSpanId();
}
|
Traceable
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/AbstractEsFieldTypeTests.java
|
{
"start": 1118,
"end": 3601
}
|
class ____<T extends EsField> extends AbstractWireTestCase<T> {
public static EsField randomAnyEsField(int maxDepth) {
return switch (between(0, 5)) {
case 0 -> EsFieldTests.randomEsField(maxDepth);
case 1 -> DateEsFieldTests.randomDateEsField(maxDepth);
case 2 -> InvalidMappedFieldTests.randomInvalidMappedField(maxDepth);
case 3 -> KeywordEsFieldTests.randomKeywordEsField(maxDepth);
case 4 -> TextEsFieldTests.randomTextEsField(maxDepth);
case 5 -> UnsupportedEsFieldTests.randomUnsupportedEsField(maxDepth);
default -> throw new IllegalArgumentException();
};
}
@Override
protected abstract T createTestInstance();
@Override
protected T copyInstance(T instance, TransportVersion version) throws IOException {
NamedWriteableRegistry namedWriteableRegistry = getNamedWriteableRegistry();
try (BytesStreamOutput output = new BytesStreamOutput(); var pso = new PlanStreamOutput(output, EsqlTestUtils.TEST_CFG)) {
pso.setTransportVersion(version);
instance.writeTo(pso);
try (
StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry);
var psi = new PlanStreamInput(in, in.namedWriteableRegistry(), config(), new SerializationTestUtils.TestNameIdMapper())
) {
psi.setTransportVersion(version);
return EsField.readFrom(psi);
}
}
}
protected Configuration config() {
return EsqlTestUtils.TEST_CFG;
}
/**
* Generate sub-properties.
* @param maxDepth the maximum number of levels of properties to make
*/
static Map<String, EsField> randomProperties(int maxDepth) {
if (maxDepth < 0) {
throw new IllegalArgumentException("depth must be >= 0");
}
if (maxDepth == 0 || randomBoolean()) {
return Map.of();
}
int targetSize = between(1, 5);
Map<String, EsField> properties = new TreeMap<>();
while (properties.size() < targetSize) {
properties.put(randomAlphaOfLength(properties.size() + 1), randomAnyEsField(maxDepth - 1));
}
return properties;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(List.of());
}
}
|
AbstractEsFieldTypeTests
|
java
|
spring-projects__spring-framework
|
spring-context-indexer/src/main/java/org/springframework/context/index/processor/CandidateComponentsIndexer.java
|
{
"start": 1736,
"end": 4654
}
|
class ____ implements Processor {
private MetadataStore metadataStore;
private MetadataCollector metadataCollector;
private TypeHelper typeHelper;
private List<StereotypesProvider> stereotypesProviders;
@Override
public Set<String> getSupportedOptions() {
return Collections.emptySet();
}
@Override
public Set<String> getSupportedAnnotationTypes() {
return Collections.singleton("*");
}
@Override
public SourceVersion getSupportedSourceVersion() {
return SourceVersion.latest();
}
@Override
public synchronized void init(ProcessingEnvironment env) {
this.stereotypesProviders = getStereotypesProviders(env);
this.typeHelper = new TypeHelper(env);
this.metadataStore = new MetadataStore(env);
this.metadataCollector = new MetadataCollector(env, this.metadataStore.readMetadata());
}
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
this.metadataCollector.processing(roundEnv);
roundEnv.getRootElements().forEach(this::processElement);
if (roundEnv.processingOver()) {
writeMetaData();
}
return false;
}
@Override
public Iterable<? extends Completion> getCompletions(
Element element, AnnotationMirror annotation, ExecutableElement member, String userText) {
return Collections.emptyList();
}
private List<StereotypesProvider> getStereotypesProviders(ProcessingEnvironment env) {
List<StereotypesProvider> result = new ArrayList<>();
TypeHelper typeHelper = new TypeHelper(env);
result.add(new IndexedStereotypesProvider(typeHelper));
result.add(new StandardStereotypesProvider(typeHelper));
result.add(new PackageInfoStereotypesProvider());
return result;
}
private void processElement(Element element) {
addMetadataFor(element);
staticTypesIn(element.getEnclosedElements()).forEach(this::processElement);
}
private void addMetadataFor(Element element) {
Set<String> stereotypes = new LinkedHashSet<>();
this.stereotypesProviders.forEach(p -> stereotypes.addAll(p.getStereotypes(element)));
if (!stereotypes.isEmpty()) {
this.metadataCollector.add(new ItemMetadata(this.typeHelper.getType(element), stereotypes));
}
}
private void writeMetaData() {
CandidateComponentsMetadata metadata = this.metadataCollector.getMetadata();
if (!metadata.getItems().isEmpty()) {
try {
this.metadataStore.writeMetadata(metadata);
}
catch (IOException ex) {
throw new IllegalStateException("Failed to write metadata", ex);
}
}
}
private static List<TypeElement> staticTypesIn(Iterable<? extends Element> elements) {
List<TypeElement> list = new ArrayList<>();
for (Element element : elements) {
if ((element.getKind().isClass() || element.getKind() == ElementKind.INTERFACE) &&
element.getModifiers().contains(Modifier.STATIC) && element instanceof TypeElement te) {
list.add(te);
}
}
return list;
}
}
|
CandidateComponentsIndexer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java
|
{
"start": 1611,
"end": 4829
}
|
class ____ extends IbmWatsonxModel {
private URI uri;
public IbmWatsonxEmbeddingsModel(
String inferenceEntityId,
TaskType taskType,
String service,
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
ChunkingSettings chunkingSettings,
Map<String, Object> secrets,
ConfigurationParseContext context
) {
this(
inferenceEntityId,
taskType,
service,
IbmWatsonxEmbeddingsServiceSettings.fromMap(serviceSettings, context),
EmptyTaskSettings.INSTANCE,
chunkingSettings,
DefaultSecretSettings.fromMap(secrets)
);
}
public IbmWatsonxEmbeddingsModel(IbmWatsonxEmbeddingsModel model, IbmWatsonxEmbeddingsServiceSettings serviceSettings) {
super(model, serviceSettings);
}
// Should only be used directly for testing
IbmWatsonxEmbeddingsModel(
String inferenceEntityId,
TaskType taskType,
String service,
IbmWatsonxEmbeddingsServiceSettings serviceSettings,
TaskSettings taskSettings,
ChunkingSettings chunkingsettings,
@Nullable DefaultSecretSettings secrets
) {
super(
new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings, chunkingsettings),
new ModelSecrets(secrets),
serviceSettings
);
try {
this.uri = buildUri(serviceSettings.url().toString(), serviceSettings.apiVersion());
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
// Should only be used directly for testing
IbmWatsonxEmbeddingsModel(
String inferenceEntityId,
TaskType taskType,
String service,
String uri,
IbmWatsonxEmbeddingsServiceSettings serviceSettings,
TaskSettings taskSettings,
@Nullable DefaultSecretSettings secrets
) {
super(
new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings),
new ModelSecrets(secrets),
serviceSettings
);
try {
this.uri = new URI(uri);
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
@Override
public IbmWatsonxEmbeddingsServiceSettings getServiceSettings() {
return (IbmWatsonxEmbeddingsServiceSettings) super.getServiceSettings();
}
@Override
public DefaultSecretSettings getSecretSettings() {
return (DefaultSecretSettings) super.getSecretSettings();
}
public URI uri() {
return uri;
}
@Override
public ExecutableAction accept(IbmWatsonxActionVisitor visitor, Map<String, Object> taskSettings) {
return visitor.create(this, taskSettings);
}
public static URI buildUri(String uri, String apiVersion) throws URISyntaxException {
return new URIBuilder().setScheme("https")
.setHost(uri)
.setPathSegments(ML, V1, TEXT, EMBEDDINGS)
.setParameter("version", apiVersion)
.build();
}
}
|
IbmWatsonxEmbeddingsModel
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java
|
{
"start": 1458,
"end": 3182
}
|
class ____<K, V> extends RecordReader<K, V> {
RecordReader<K, V> originalRR;
/**
* Constructs the DelegatingRecordReader.
*
* @param split TaggegInputSplit object
* @param context TaskAttemptContext object
*
* @throws IOException
* @throws InterruptedException
*/
@SuppressWarnings("unchecked")
public DelegatingRecordReader(InputSplit split, TaskAttemptContext context)
throws IOException, InterruptedException {
// Find the InputFormat and then the RecordReader from the
// TaggedInputSplit.
TaggedInputSplit taggedInputSplit = (TaggedInputSplit) split;
InputFormat<K, V> inputFormat = (InputFormat<K, V>) ReflectionUtils
.newInstance(taggedInputSplit.getInputFormatClass(), context
.getConfiguration());
originalRR = inputFormat.createRecordReader(taggedInputSplit
.getInputSplit(), context);
}
@Override
public void close() throws IOException {
originalRR.close();
}
@Override
public K getCurrentKey() throws IOException, InterruptedException {
return originalRR.getCurrentKey();
}
@Override
public V getCurrentValue() throws IOException, InterruptedException {
return originalRR.getCurrentValue();
}
@Override
public float getProgress() throws IOException, InterruptedException {
return originalRR.getProgress();
}
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
throws IOException, InterruptedException {
originalRR.initialize(((TaggedInputSplit) split).getInputSplit(), context);
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
return originalRR.nextKeyValue();
}
}
|
DelegatingRecordReader
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingWithMultipleManyToOne.java
|
{
"start": 2211,
"end": 2678
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
@ManyToOne
private Parent parent;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Parent getParent() {
return parent;
}
public void setParent(Parent parent) {
this.parent = parent;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
|
ChildB
|
java
|
apache__camel
|
components/camel-ssh/src/test/java/org/apache/camel/component/ssh/SshComponentKnownHostEdDSATest.java
|
{
"start": 986,
"end": 2466
}
|
class ____ extends SshComponentTestSupport {
@Test
public void testProducerWithEdDSAKeyType() throws Exception {
final String msg = "test";
MockEndpoint mock = getMockEndpoint("mock:password");
mock.expectedMinimumMessageCount(1);
mock.expectedBodiesReceived(msg);
mock.expectedHeaderReceived(SshConstants.EXIT_VALUE, 0);
mock.expectedHeaderReceived(SshConstants.STDERR, "Error:test");
template.sendBody("direct:ssh", msg);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected String getHostKey() {
return "src/test/resources/key_ed25519.pem";
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class).handled(true).to("mock:error");
from("ssh://smx:smx@localhost:" + port
+ "?useFixedDelay=true&delay=40000&pollCommand=test%0A&knownHostsResource=classpath:known_hosts_eddsa&failOnUnknownHost=true")
.to("mock:result");
from("direct:ssh")
.to("ssh://smx:smx@localhost:" + port
+ "?timeout=3000&knownHostsResource=classpath:known_hosts_eddsa&failOnUnknownHost=true")
.to("mock:password");
}
};
}
}
|
SshComponentKnownHostEdDSATest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommandTests.java
|
{
"start": 4157,
"end": 39908
}
|
class ____ extends ESTestCase {
private static final String CA_PASSWORD = "ca-password";
private FileSystem jimfs;
private Path testRoot;
@Before
public void createTestDir() throws Exception {
Configuration conf = Configuration.unix().toBuilder().setAttributeViews("posix").build();
jimfs = Jimfs.newFileSystem(conf);
testRoot = jimfs.getPath(getClass().getSimpleName() + "-" + getTestName());
IOUtils.rm(testRoot);
Files.createDirectories(testRoot);
}
@BeforeClass
public static void muteInFips() {
assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
}
public void testGenerateSingleCertificateSigningRequest() throws Exception {
final Path outFile = testRoot.resolve("csr.zip").toAbsolutePath();
final List<String> hostNames = randomHostNames();
final List<String> ipAddresses = randomIpAddresses();
final String certificateName = hostNames.get(0);
final HttpCertificateCommand command = new PathAwareHttpCertificateCommand(outFile);
final MockTerminal terminal = MockTerminal.create();
terminal.addTextInput("y"); // generate CSR
terminal.addTextInput(randomBoolean() ? "n" : ""); // cert-per-node
// enter hostnames
hostNames.forEach(terminal::addTextInput);
terminal.addTextInput(""); // end-of-hosts
terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct
// enter ip names
ipAddresses.forEach(terminal::addTextInput);
terminal.addTextInput(""); // end-of-ips
terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct
terminal.addTextInput(randomBoolean() ? "n" : ""); // don't change advanced settings
final String password = randomPassword(false);
terminal.addSecretInput(password);
if ("".equals(password) == false) {
terminal.addSecretInput(password);
} // confirm
terminal.addTextInput(outFile.toString());
final Environment env = newEnvironment();
final OptionSet options = command.getParser().parse(new String[0]);
command.execute(terminal, options, env, new ProcessInfo(Map.of(), Map.of(), createTempDir()));
Path zipRoot = getZipRoot(outFile);
assertThat(zipRoot.resolve("elasticsearch"), isDirectory());
final Path csrPath = zipRoot.resolve("elasticsearch/http-" + certificateName + ".csr");
final PKCS10CertificationRequest csr = readPemObject(csrPath, "CERTIFICATE REQUEST", PKCS10CertificationRequest::new);
final Path keyPath = zipRoot.resolve("elasticsearch/http-" + certificateName + ".key");
final AtomicBoolean wasEncrypted = new AtomicBoolean(false);
final PrivateKey privateKey = PemUtils.readPrivateKey(keyPath, () -> {
wasEncrypted.set(true);
return password.toCharArray();
});
if ("".equals(password) == false) {
assertTrue("Password should have been required to decrypted key", wasEncrypted.get());
}
final Path esReadmePath = zipRoot.resolve("elasticsearch/README.txt");
assertThat(esReadmePath, isRegularFile());
final String esReadme = Files.readString(esReadmePath);
final Path ymlPath = zipRoot.resolve("elasticsearch/sample-elasticsearch.yml");
assertThat(ymlPath, isRegularFile());
final String yml = Files.readString(ymlPath);
// Verify the CSR was built correctly
verifyCertificationRequest(csr, certificateName, hostNames, ipAddresses);
// Verify the key
assertMatchingPair(getPublicKey(csr), privateKey);
final String csrName = csrPath.getFileName().toString();
final String crtName = csrName.substring(0, csrName.length() - 4) + ".crt";
// Verify the README
assertThat(esReadme, containsString(csrName));
assertThat(esReadme, containsString(crtName));
assertThat(esReadme, containsString(keyPath.getFileName().toString()));
assertThat(esReadme, containsString(ymlPath.getFileName().toString()));
if ("".equals(password) == false) {
assertThat(esReadme, not(containsString(password)));
}
// Verify the yml
assertThat(yml, not(containsString(csrName)));
assertThat(yml, containsString(crtName));
assertThat(yml, containsString(keyPath.getFileName().toString()));
if ("".equals(password) == false) {
assertThat(yml, not(containsString(password)));
}
// Should not be a CA directory in CSR mode
assertThat(zipRoot.resolve("ca"), not(pathExists()));
// No CA in CSR mode
verifyKibanaDirectory(
zipRoot,
false,
List.of("Certificate Signing Request"),
Stream.of(password, csrName).filter(s -> "".equals(s) == false).collect(Collectors.toList())
);
}
public void testGenerateSingleCertificateWithExistingCA() throws Exception {
final Path outFile = testRoot.resolve("certs.zip").toAbsolutePath();
final List<String> hostNames = randomHostNames();
final List<String> ipAddresses = randomIpAddresses();
final String certificateName = hostNames.get(0);
final Path caCertPath = getDataPath("ca.crt");
assertThat(caCertPath, isRegularFile());
final Path caKeyPath = getDataPath("ca.key");
assertThat(caKeyPath, isRegularFile());
final String caPassword = CA_PASSWORD;
final int years = randomIntBetween(1, 8);
final HttpCertificateCommand command = new PathAwareHttpCertificateCommand(outFile);
final MockTerminal terminal = MockTerminal.create();
terminal.addTextInput(randomBoolean() ? "n" : ""); // don't generate CSR
terminal.addTextInput("y"); // existing CA
// randomise between cert+key, key+cert, PKCS12 : the tool is smart enough to handle any of those.
switch (randomFrom(FileType.PEM_CERT, FileType.PEM_KEY, FileType.PKCS12)) {
case PEM_CERT -> {
terminal.addTextInput(caCertPath.toAbsolutePath().toString());
terminal.addTextInput(caKeyPath.toAbsolutePath().toString());
}
case PEM_KEY -> {
terminal.addTextInput(caKeyPath.toAbsolutePath().toString());
terminal.addTextInput(caCertPath.toAbsolutePath().toString());
}
case PKCS12 -> terminal.addTextInput(getDataPath("ca.p12").toAbsolutePath().toString());
}
terminal.addSecretInput(caPassword);
terminal.addTextInput(years + "y"); // validity period
terminal.addTextInput(randomBoolean() ? "n" : ""); // don't use cert-per-node
// enter hostnames
hostNames.forEach(terminal::addTextInput);
terminal.addTextInput(""); // end-of-hosts
terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct
// enter ip names
ipAddresses.forEach(terminal::addTextInput);
terminal.addTextInput(""); // end-of-ips
terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct
terminal.addTextInput(randomBoolean() ? "n" : ""); // don't change advanced settings
final String password = randomPassword(randomBoolean());
terminal.addSecretInput(password);
if ("".equals(password) == false) {
terminal.addSecretInput(password);
if (password.length() > 50) {
terminal.addTextInput("y"); // Accept OpenSSL issue
}
} // confirm
terminal.addTextInput(outFile.toString());
final Environment env = newEnvironment();
final OptionSet options = command.getParser().parse(new String[0]);
command.execute(terminal, options, env, new ProcessInfo(Map.of(), Map.of(), createTempDir()));
if (password.length() > 50) {
assertThat(terminal.getOutput(), containsString("OpenSSL"));
} else {
assertThat(terminal.getOutput(), not(containsString("OpenSSL")));
}
Path zipRoot = getZipRoot(outFile);
assertThat(zipRoot.resolve("elasticsearch"), isDirectory());
final Path p12Path = zipRoot.resolve("elasticsearch/http.p12");
final Path readmePath = zipRoot.resolve("elasticsearch/README.txt");
assertThat(readmePath, isRegularFile());
final String readme = Files.readString(readmePath);
final Path ymlPath = zipRoot.resolve("elasticsearch/sample-elasticsearch.yml");
assertThat(ymlPath, isRegularFile());
final String yml = Files.readString(ymlPath);
final Tuple<X509Certificate, PrivateKey> certAndKey = readCertificateAndKey(p12Path, password.toCharArray());
// Verify the Cert was built correctly
verifyCertificate(certAndKey.v1(), certificateName, years, hostNames, ipAddresses);
assertThat(getRSAKeySize(certAndKey.v1().getPublicKey()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));
assertThat(getRSAKeySize(certAndKey.v2()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));
final X509Certificate caCert = CertParsingUtils.readX509Certificate(caCertPath);
verifyChain(certAndKey.v1(), caCert);
// Verify the README
assertThat(readme, containsString(p12Path.getFileName().toString()));
assertThat(readme, containsString(ymlPath.getFileName().toString()));
if ("".equals(password) == false) {
assertThat(readme, not(containsString(password)));
}
assertThat(readme, not(containsString(caPassword)));
// Verify the yml
assertThat(yml, containsString(p12Path.getFileName().toString()));
if ("".equals(password) == false) {
assertThat(yml, not(containsString(password)));
}
assertThat(yml, not(containsString(caPassword)));
// Should not be a CA directory when using an existing CA.
assertThat(zipRoot.resolve("ca"), not(pathExists()));
verifyKibanaDirectory(
zipRoot,
true,
List.of("2. elasticsearch-ca.pem"),
Stream.of(password, caPassword, caKeyPath.getFileName().toString())
.filter(s -> "".equals(s) == false)
.collect(Collectors.toList())
);
}
public void testGenerateMultipleCertificateWithNewCA() throws Exception {
final Path outFile = testRoot.resolve("certs.zip").toAbsolutePath();
final int numberCerts = randomIntBetween(3, 6);
final String[] certNames = new String[numberCerts];
final String[] hostNames = new String[numberCerts];
for (int i = 0; i < numberCerts; i++) {
certNames[i] = randomAlphaOfLengthBetween(6, 12);
hostNames[i] = randomAlphaOfLengthBetween(4, 8);
}
final HttpCertificateCommand command = new PathAwareHttpCertificateCommand(outFile);
final MockTerminal terminal = MockTerminal.create();
terminal.addTextInput(randomBoolean() ? "n" : ""); // don't generate CSR
terminal.addTextInput(randomBoolean() ? "n" : ""); // no existing CA
final String caDN;
final int caYears;
final int caKeySize;
final List<String> caKeyUsage;
// randomise whether to change CA defaults.
if (randomBoolean()) {
terminal.addTextInput("y"); // Change defaults
caDN = "CN=" + randomAlphaOfLengthBetween(3, 8);
caYears = randomIntBetween(1, 3);
caKeySize = randomFrom(2048, 3072, 4096);
caKeyUsage = randomNonEmptySubsetOf(CertGenUtils.KEY_USAGE_MAPPINGS.keySet());
terminal.addTextInput(caDN);
terminal.addTextInput(caYears + "y");
terminal.addTextInput(Integer.toString(caKeySize));
terminal.addTextInput(Strings.collectionToCommaDelimitedString(caKeyUsage));
terminal.addTextInput("n"); // Don't change values
} else {
terminal.addTextInput(randomBoolean() ? "n" : ""); // Don't change defaults
caDN = HttpCertificateCommand.DEFAULT_CA_NAME.toString();
caYears = HttpCertificateCommand.DEFAULT_CA_VALIDITY.getYears();
caKeySize = HttpCertificateCommand.DEFAULT_CA_KEY_SIZE;
caKeyUsage = HttpCertificateCommand.DEFAULT_CA_KEY_USAGE;
}
final String caPassword = randomPassword(randomBoolean());
boolean expectLongPasswordWarning = caPassword.length() > 50;
// randomly enter a long password here, and then say "no" on the warning prompt
if (randomBoolean()) {
String longPassword = randomAlphaOfLengthBetween(60, 120);
terminal.addSecretInput(longPassword);
terminal.addSecretInput(longPassword);
terminal.addTextInput("n"); // Change our mind
expectLongPasswordWarning = true;
}
terminal.addSecretInput(caPassword);
if ("".equals(caPassword) == false) {
terminal.addSecretInput(caPassword);
if (caPassword.length() > 50) {
terminal.addTextInput("y"); // Acknowledge possible OpenSSL issue
}
} // confirm
final int certYears = randomIntBetween(1, 8);
terminal.addTextInput(certYears + "y"); // node cert validity period
terminal.addTextInput("y"); // cert-per-node
for (int i = 0; i < numberCerts; i++) {
if (i != 0) {
terminal.addTextInput(randomBoolean() ? "y" : ""); // another cert
}
// certificate / node name
terminal.addTextInput(certNames[i]);
// enter hostname
terminal.addTextInput(hostNames[i]); // end-of-hosts
terminal.addTextInput(""); // end-of-hosts
terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct
// no ip
terminal.addTextInput(""); // end-of-ip
terminal.addTextInput(randomBoolean() ? "y" : ""); // yes, correct
terminal.addTextInput(randomBoolean() ? "n" : ""); // don't change advanced settings
}
terminal.addTextInput("n"); // no more certs
final String password = randomPassword(false);
// randomly enter an incorrect password here which will fail the "enter twice" check and prompt to try again
if (randomBoolean()) {
String wrongPassword = randomAlphaOfLengthBetween(8, 20);
terminal.addTextInput(wrongPassword);
terminal.addTextInput("__" + wrongPassword);
}
terminal.addSecretInput(password);
if ("".equals(password) == false) {
terminal.addSecretInput(password);
} // confirm
terminal.addTextInput(outFile.toString());
final Environment env = newEnvironment();
final OptionSet options = command.getParser().parse(new String[0]);
command.execute(terminal, options, env, new ProcessInfo(Map.of(), Map.of(), createTempDir()));
if (expectLongPasswordWarning) {
assertThat(terminal.getOutput(), containsString("OpenSSL"));
} else {
assertThat(terminal.getOutput(), not(containsString("OpenSSL")));
}
Path zipRoot = getZipRoot(outFile);
// Should have a CA directory with the generated CA.
assertThat(zipRoot.resolve("ca"), isDirectory());
final Path caPath = zipRoot.resolve("ca/ca.p12");
final Tuple<X509Certificate, PrivateKey> caCertKey = readCertificateAndKey(caPath, caPassword.toCharArray());
verifyCertificate(caCertKey.v1(), caDN.replaceFirst("CN=", ""), caYears, List.of(), List.of());
assertThat(getRSAKeySize(caCertKey.v1().getPublicKey()), is(caKeySize));
assertThat(getRSAKeySize(caCertKey.v2()), is(caKeySize));
assertExpectedKeyUsage(caCertKey.v1(), caKeyUsage);
assertThat(zipRoot.resolve("elasticsearch"), isDirectory());
for (int i = 0; i < numberCerts; i++) {
assertThat(zipRoot.resolve("elasticsearch/" + certNames[i]), isDirectory());
final Path p12Path = zipRoot.resolve("elasticsearch/" + certNames[i] + "/http.p12");
assertThat(p12Path, isRegularFile());
final Path readmePath = zipRoot.resolve("elasticsearch/" + certNames[i] + "/README.txt");
assertThat(readmePath, isRegularFile());
final String readme = Files.readString(readmePath);
final Path ymlPath = zipRoot.resolve("elasticsearch/" + certNames[i] + "/sample-elasticsearch.yml");
assertThat(ymlPath, isRegularFile());
final String yml = Files.readString(ymlPath);
final Tuple<X509Certificate, PrivateKey> certAndKey = readCertificateAndKey(p12Path, password.toCharArray());
// Verify the Cert was built correctly
verifyCertificate(certAndKey.v1(), certNames[i], certYears, List.of(hostNames[i]), List.of());
verifyChain(certAndKey.v1(), caCertKey.v1());
assertThat(getRSAKeySize(certAndKey.v1().getPublicKey()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));
assertThat(getRSAKeySize(certAndKey.v2()), is(HttpCertificateCommand.DEFAULT_CERT_KEY_SIZE));
assertExpectedKeyUsage(certAndKey.v1(), HttpCertificateCommand.DEFAULT_CERT_KEY_USAGE);
// Verify the README
assertThat(readme, containsString(p12Path.getFileName().toString()));
assertThat(readme, containsString(ymlPath.getFileName().toString()));
if ("".equals(password) == false) {
assertThat(readme, not(containsString(password)));
}
if ("".equals(caPassword) == false) {
assertThat(readme, not(containsString(caPassword)));
}
// Verify the yml
assertThat(yml, containsString(p12Path.getFileName().toString()));
if ("".equals(password) == false) {
assertThat(yml, not(containsString(password)));
}
if ("".equals(caPassword) == false) {
assertThat(yml, not(containsString(caPassword)));
}
}
verifyKibanaDirectory(
zipRoot,
true,
List.of("2. elasticsearch-ca.pem"),
Stream.of(password, caPassword, caPath.getFileName().toString()).filter(s -> "".equals(s) == false).collect(Collectors.toList())
);
}
public void testParsingValidityPeriod() throws Exception {
final MockTerminal terminal = MockTerminal.create();
terminal.addTextInput("2y");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.ofYears(2)));
terminal.addTextInput("18m");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.ofMonths(18)));
terminal.addTextInput("90d");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.ofDays(90)));
terminal.addTextInput("1y, 6m");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.ofYears(1).withMonths(6)));
// Test: Re-prompt on bad input.
terminal.addTextInput("2m & 4d");
terminal.addTextInput("2m 4d");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.ofMonths(2).withDays(4)));
terminal.addTextInput("1y, 6m");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.ofYears(1).withMonths(6)));
// Test: Accept default value
final Period p = Period.of(randomIntBetween(1, 5), randomIntBetween(0, 11), randomIntBetween(0, 30));
terminal.addTextInput("");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", p, 1), is(p));
final int y = randomIntBetween(1, 5);
final int m = randomIntBetween(1, 11);
final int d = randomIntBetween(1, 30);
terminal.addTextInput(y + "y " + m + "m " + d + "d");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 1), is(Period.of(y, m, d)));
// Test: Minimum Days
final int shortDays = randomIntBetween(1, 20);
terminal.addTextInput(shortDays + "d");
terminal.addTextInput("y"); // I'm sure
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 21), is(Period.ofDays(shortDays)));
terminal.addTextInput(shortDays + "d");
terminal.addTextInput("n"); // I'm not sure
terminal.addTextInput("30d");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 21), is(Period.ofDays(30)));
terminal.addTextInput("2m");
terminal.addTextInput("n"); // I'm not sure
terminal.addTextInput("2y");
assertThat(HttpCertificateCommand.readPeriodInput(terminal, "", null, 90), is(Period.ofYears(2)));
}
public void testValidityPeriodToString() throws Exception {
assertThat(HttpCertificateCommand.toString(Period.ofYears(2)), is("2y"));
assertThat(HttpCertificateCommand.toString(Period.ofMonths(5)), is("5m"));
assertThat(HttpCertificateCommand.toString(Period.ofDays(60)), is("60d"));
assertThat(HttpCertificateCommand.toString(Period.ZERO), is("0d"));
assertThat(HttpCertificateCommand.toString(null), is("N/A"));
final int y = randomIntBetween(1, 5);
final int m = randomIntBetween(1, 11);
final int d = randomIntBetween(1, 30);
assertThat(HttpCertificateCommand.toString(Period.of(y, m, d)), is(y + "y," + m + "m," + d + "d"));
}
public void testGuessFileType() throws Exception {
MockTerminal terminal = MockTerminal.create();
final Path caCert = getDataPath("ca.crt");
final Path caKey = getDataPath("ca.key");
assertThat(guessFileType(caCert, terminal), is(FileType.PEM_CERT));
assertThat(guessFileType(caKey, terminal), is(FileType.PEM_KEY));
final Path certChain = testRoot.resolve("ca.pem");
try (OutputStream out = Files.newOutputStream(certChain)) {
Files.copy(getDataPath("testnode.crt"), out);
Files.copy(caCert, out);
}
assertThat(guessFileType(certChain, terminal), is(FileType.PEM_CERT_CHAIN));
final Path tmpP12 = testRoot.resolve("tmp.p12");
assertThat(guessFileType(tmpP12, terminal), is(FileType.PKCS12));
final Path tmpJks = testRoot.resolve("tmp.jks");
assertThat(guessFileType(tmpJks, terminal), is(FileType.JKS));
final Path tmpKeystore = testRoot.resolve("tmp.keystore");
writeDummyKeystore(tmpKeystore, "PKCS12");
assertThat(guessFileType(tmpKeystore, terminal), is(FileType.PKCS12));
writeDummyKeystore(tmpKeystore, "jks");
assertThat(guessFileType(tmpKeystore, terminal), is(FileType.JKS));
}
public void testTextFileSubstitutions() throws Exception {
CheckedBiFunction<String, Map<String, String>, String, Exception> copy = (source, subs) -> {
try (
InputStream in = new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8));
StringWriter out = new StringWriter();
PrintWriter writer = new PrintWriter(out)
) {
HttpCertificateCommand.copyWithSubstitutions(in, writer, subs);
return out.toString().replace("\r\n", "\n");
}
};
assertThat(copy.apply("abc\n", Map.of()), is("abc\n"));
assertThat(copy.apply("${not_a_var}\n", Map.of()), is("${not_a_var}\n"));
assertThat(copy.apply("${var}\n", Map.of("var", "xyz")), is("xyz\n"));
assertThat(copy.apply("#if not\nbody\n#endif\n", Map.of()), is(""));
assertThat(copy.apply("#if blank\nbody\n#endif\n", Map.of("blank", "")), is(""));
assertThat(copy.apply("#if yes\nbody\n#endif\n", Map.of("yes", "true")), is("body\n"));
assertThat(copy.apply("#if yes\ntrue\n#else\nfalse\n#endif\n", Map.of("yes", "*")), is("true\n"));
assertThat(copy.apply("#if blank\ntrue\n#else\nfalse\n#endif\n", Map.of("blank", "")), is("false\n"));
assertThat(copy.apply("#if var\n--> ${var} <--\n#else\n(${var})\n#endif\n", Map.of("var", "foo")), is("--> foo <--\n"));
}
private Path getZipRoot(Path outFile) throws IOException, URISyntaxException {
assertThat(outFile, isRegularFile());
FileSystem fileSystem = FileSystems.newFileSystem(new URI("jar:" + outFile.toUri()), Collections.emptyMap());
return fileSystem.getPath("/");
}
private List<String> randomIpAddresses() throws UnknownHostException {
final int ipCount = randomIntBetween(0, 3);
final List<String> ipAddresses = new ArrayList<>(ipCount);
for (int i = 0; i < ipCount; i++) {
String ip = randomIpAddress();
ipAddresses.add(ip);
}
return ipAddresses;
}
private String randomIpAddress() throws UnknownHostException {
return formatIpAddress(randomByteArrayOfLength(4));
}
private String formatIpAddress(byte[] addr) throws UnknownHostException {
return NetworkAddress.format(InetAddress.getByAddress(addr));
}
private List<String> randomHostNames() {
final int hostCount = randomIntBetween(1, 5);
final List<String> hostNames = new ArrayList<>(hostCount);
for (int i = 0; i < hostCount; i++) {
String host = String.join(".", randomArray(1, 4, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)));
if (i > 0 && randomBoolean()) {
host = "*." + host;
}
hostNames.add(host);
}
return hostNames;
}
private String randomPassword(boolean longPassword) {
// We want to assert that this password doesn't end up in any output files, so we need to make sure we
// don't randomly generate a real word.
return randomFrom(
"",
randomAlphaOfLengthBetween(4, 8) + randomFrom('~', '*', '%', '$', '|') + randomAlphaOfLength(longPassword ? 100 : 4)
);
}
private void verifyCertificationRequest(
PKCS10CertificationRequest csr,
String certificateName,
List<String> hostNames,
List<String> ipAddresses
) throws IOException {
// We rebuild the DN from the encoding because BC uses openSSL style toString, but we use LDAP style.
assertThat(new X500Principal(csr.getSubject().getEncoded()).toString(), is("CN=" + certificateName.replaceAll("\\.", ", DC=")));
final Attribute[] extensionAttributes = csr.getAttributes(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest);
assertThat(extensionAttributes, arrayWithSize(1));
assertThat(extensionAttributes[0].getAttributeValues(), arrayWithSize(1));
assertThat(extensionAttributes[0].getAttributeValues()[0], instanceOf(DLSequence.class));
// We register 1 extension with the subject alternative names and extended key usage
final Extensions extensions = Extensions.getInstance(extensionAttributes[0].getAttributeValues()[0]);
assertThat(extensions, notNullValue());
assertThat(
extensions.getExtensionOIDs(),
arrayContainingInAnyOrder(Extension.subjectAlternativeName, Extension.keyUsage, Extension.extendedKeyUsage)
);
final GeneralNames names = GeneralNames.fromExtensions(extensions, Extension.subjectAlternativeName);
assertThat(names.getNames(), arrayWithSize(hostNames.size() + ipAddresses.size()));
for (GeneralName name : names.getNames()) {
assertThat(name.getTagNo(), oneOf(GeneralName.dNSName, GeneralName.iPAddress));
if (name.getTagNo() == GeneralName.dNSName) {
final String dns = DERIA5String.getInstance(name.getName()).getString();
assertThat(dns, in(hostNames));
} else if (name.getTagNo() == GeneralName.iPAddress) {
final String ip = formatIpAddress(DEROctetString.getInstance(name.getName()).getOctets());
assertThat(ip, in(ipAddresses));
}
}
ExtendedKeyUsage extendedKeyUsage = ExtendedKeyUsage.fromExtensions(extensions);
assertThat(extendedKeyUsage.getUsages(), arrayContainingInAnyOrder(KeyPurposeId.id_kp_serverAuth));
KeyUsage keyUsage = KeyUsage.fromExtensions(extensions);
assertThat(keyUsage, is(equalTo(new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment))));
}
private void verifyCertificate(
X509Certificate cert,
String certificateName,
int years,
List<String> hostNames,
List<String> ipAddresses
) throws CertificateParsingException {
assertThat(cert.getSubjectX500Principal().toString(), is("CN=" + certificateName.replaceAll("\\.", ", DC=")));
final Collection<List<?>> san = cert.getSubjectAlternativeNames();
final int expectedSanEntries = hostNames.size() + ipAddresses.size();
if (expectedSanEntries > 0) {
assertThat(san, hasSize(expectedSanEntries));
for (List<?> name : san) {
assertThat(name, hasSize(2));
assertThat(name.get(0), Matchers.instanceOf(Integer.class));
assertThat(name.get(1), Matchers.instanceOf(String.class));
final Integer tag = (Integer) name.get(0);
final String value = (String) name.get(1);
assertThat(tag, oneOf(GeneralName.dNSName, GeneralName.iPAddress));
if (tag.intValue() == GeneralName.dNSName) {
assertThat(value, in(hostNames));
} else if (tag.intValue() == GeneralName.iPAddress) {
assertThat(value, in(ipAddresses));
}
}
} else if (san != null) {
assertThat(san, hasSize(0));
}
// We don't know exactly when the certificate was generated, but it should have been in the last 10 minutes
long now = System.currentTimeMillis();
long nowMinus10Minutes = now - TimeUnit.MINUTES.toMillis(10);
assertThat(cert.getNotBefore().getTime(), Matchers.lessThanOrEqualTo(now));
assertThat(cert.getNotBefore().getTime(), Matchers.greaterThanOrEqualTo(nowMinus10Minutes));
final ZonedDateTime expiry = Instant.ofEpochMilli(cert.getNotBefore().getTime()).atZone(ZoneOffset.UTC).plusYears(years);
assertThat(cert.getNotAfter().getTime(), is(expiry.toInstant().toEpochMilli()));
}
private void verifyChain(X509Certificate... chain) throws GeneralSecurityException {
for (int i = 1; i < chain.length; i++) {
assertThat(chain[i - 1].getIssuerX500Principal(), is(chain[i].getSubjectX500Principal()));
chain[i - 1].verify(chain[i].getPublicKey());
}
final X509Certificate root = chain[chain.length - 1];
assertThat(root.getIssuerX500Principal(), is(root.getSubjectX500Principal()));
}
/**
* Checks that a public + private key are a matching pair.
*/
private void assertMatchingPair(PublicKey publicKey, PrivateKey privateKey) throws GeneralSecurityException {
final byte[] bytes = randomByteArrayOfLength(128);
final Signature rsa = Signature.getInstance("SHA512withRSA");
rsa.initSign(privateKey);
rsa.update(bytes);
final byte[] signature = rsa.sign();
rsa.initVerify(publicKey);
rsa.update(bytes);
assertTrue("PublicKey and PrivateKey are not a matching pair", rsa.verify(signature));
}
private void verifyKibanaDirectory(
Path zipRoot,
boolean expectCAFile,
Iterable<String> readmeShouldContain,
Iterable<String> shouldNotContain
) throws IOException {
assertThat(zipRoot.resolve("kibana"), isDirectory());
if (expectCAFile) {
assertThat(zipRoot.resolve("kibana/elasticsearch-ca.pem"), isRegularFile());
} else {
assertThat(zipRoot.resolve("kibana/elasticsearch-ca.pem"), not(pathExists()));
}
final Path kibanaReadmePath = zipRoot.resolve("kibana/README.txt");
assertThat(kibanaReadmePath, isRegularFile());
final String kibanaReadme = Files.readString(kibanaReadmePath);
final Path kibanaYmlPath = zipRoot.resolve("kibana/sample-kibana.yml");
assertThat(kibanaYmlPath, isRegularFile());
final String kibanaYml = Files.readString(kibanaYmlPath);
assertThat(kibanaReadme, containsString(kibanaYmlPath.getFileName().toString()));
assertThat(kibanaReadme, containsString("elasticsearch.hosts"));
assertThat(kibanaReadme, containsString("https://"));
assertThat(kibanaReadme, containsString("elasticsearch-ca.pem"));
readmeShouldContain.forEach(s -> assertThat(kibanaReadme, containsString(s)));
shouldNotContain.forEach(s -> assertThat(kibanaReadme, not(containsString(s))));
assertThat(kibanaYml, containsString("elasticsearch.ssl.certificateAuthorities: [ \"config/elasticsearch-ca.pem\" ]"));
assertThat(kibanaYml, containsString("https://"));
shouldNotContain.forEach(s -> assertThat(kibanaYml, not(containsString(s))));
}
private PublicKey getPublicKey(PKCS10CertificationRequest pkcs) throws GeneralSecurityException {
return new JcaPKCS10CertificationRequest(pkcs).getPublicKey();
}
private int getRSAKeySize(Key key) {
assertThat(key, instanceOf(RSAKey.class));
final RSAKey rsa = (RSAKey) key;
return rsa.getModulus().bitLength();
}
private Tuple<X509Certificate, PrivateKey> readCertificateAndKey(Path pkcs12, char[] password) throws IOException,
GeneralSecurityException {
final Map<Certificate, Key> entries = CertParsingUtils.readPkcs12KeyPairs(pkcs12, password, alias -> password);
assertThat(entries.entrySet(), Matchers.hasSize(1));
Certificate cert = entries.keySet().iterator().next();
Key key = entries.get(cert);
assertThat(cert, instanceOf(X509Certificate.class));
assertThat(key, instanceOf(PrivateKey.class));
assertMatchingPair(cert.getPublicKey(), (PrivateKey) key);
return new Tuple<>((X509Certificate) cert, (PrivateKey) key);
}
private <T> T readPemObject(Path path, String expectedType, CheckedFunction<? super byte[], T, IOException> factory)
throws IOException {
assertThat(path, isRegularFile());
final PemReader csrReader = new PemReader(Files.newBufferedReader(path));
final PemObject csrPem = csrReader.readPemObject();
assertThat(csrPem.getType(), is(expectedType));
return factory.apply(csrPem.getContent());
}
private void writeDummyKeystore(Path path, String type) throws GeneralSecurityException, IOException {
Files.deleteIfExists(path);
KeyStore ks = KeyStore.getInstance(type);
ks.load(null);
if (randomBoolean()) {
final X509Certificate cert = CertParsingUtils.readX509Certificate(getDataPath("ca.crt"));
ks.setCertificateEntry(randomAlphaOfLength(4), cert);
}
try (OutputStream out = Files.newOutputStream(path)) {
ks.store(out, randomAlphaOfLength(8).toCharArray());
}
}
/**
* A special version of {@link HttpCertificateCommand} that can resolve input strings back to JIMFS paths
*/
private
|
HttpCertificateCommandTests
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/json/code/javaguide/json/JavaJsonCustomObjectMapperModule.java
|
{
"start": 313,
"end": 552
}
|
class ____ extends AbstractModule {
@Override
protected void configure() {
bind(ObjectMapper.class).toProvider(JavaJsonCustomObjectMapper.class).asEagerSingleton();
}
}
// #custom-java-object-mapper2
|
JavaJsonCustomObjectMapperModule
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/authorization/AuthorizationPoller.java
|
{
"start": 10832,
"end": 11638
}
|
class ____ implements Consumer<ActionListener<Void>> {
@Override
public void accept(ActionListener<Void> listener) {
logger.info("Skipping sending authorization request and completing task, because poller is shutting down");
// We should already be shutdown, so this should just be a noop
shutdownInternal(AuthorizationPoller.this::markAsCompleted);
listener.onResponse(null);
}
}
private record RegistryNotReadyAction() implements Consumer<ActionListener<Void>> {
@Override
public void accept(ActionListener<Void> listener) {
logger.info("Skipping sending authorization request, because model registry is not ready");
listener.onResponse(null);
}
}
private
|
ShutdownAction
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/suppress/CustomSuppressionTest.java
|
{
"start": 5089,
"end": 5407
}
|
class ____ {
@SuppressBothCheckers
int identity(int value) {
return value;
}
@SuppressMyChecker2
int square(int value) {
return value * value;
}
}
""")
.doTest();
}
}
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/spi/PreCollectionRemoveEventListener.java
|
{
"start": 208,
"end": 316
}
|
interface ____ {
void onPreRemoveCollection(PreCollectionRemoveEvent event);
}
|
PreCollectionRemoveEventListener
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/suite/engine/testsuites/ThreePartCyclicSuite.java
|
{
"start": 852,
"end": 986
}
|
class ____ {
}
@Suite
@IncludeClassNamePatterns(".*")
@SelectClasses({ PartB.class, SingleTestTestCase.class })
public static
|
PartB
|
java
|
apache__flink
|
flink-tests-java17/src/test/java/org/apache/flink/api/java/typeutils/runtime/PojoRecordSerializerTest.java
|
{
"start": 4195,
"end": 9153
}
|
class ____ {
public int dumm1;
public String dumm2;
public double dumm3;
public int[] dumm4;
public NestedTestUserClass() {}
public NestedTestUserClass(int dumm1, String dumm2, double dumm3, int[] dumm4) {
this.dumm1 = dumm1;
this.dumm2 = dumm2;
this.dumm3 = dumm3;
this.dumm4 = dumm4;
}
@Override
public int hashCode() {
return Objects.hash(dumm1, dumm2, dumm3, dumm4);
}
@Override
public boolean equals(Object other) {
if (!(other instanceof NestedTestUserClass)) {
return false;
}
NestedTestUserClass otherTUC = (NestedTestUserClass) other;
if (dumm1 != otherTUC.dumm1) {
return false;
}
if (!dumm2.equals(otherTUC.dumm2)) {
return false;
}
if (dumm3 != otherTUC.dumm3) {
return false;
}
if (dumm4.length != otherTUC.dumm4.length) {
return false;
}
for (int i = 0; i < dumm4.length; i++) {
if (dumm4[i] != otherTUC.dumm4[i]) {
return false;
}
}
return true;
}
}
/** This tests if the hashes returned by the pojo and tuple comparators are the same. */
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void testTuplePojoTestEquality() throws IncompatibleKeysException {
// test with a simple, string-key first.
PojoTypeInfo<TestUserClass> pType = (PojoTypeInfo<TestUserClass>) type;
List<FlatFieldDescriptor> result = new ArrayList<>();
pType.getFlatFields("nestedClass.dumm2", 0, result);
int[] fields = new int[1]; // see below
fields[0] = result.get(0).getPosition();
TypeComparator<TestUserClass> pojoComp =
pType.createComparator(fields, new boolean[] {true}, 0, new ExecutionConfig());
TestUserClass pojoTestRecord =
new TestUserClass(
0,
"abc",
3d,
new Date(),
new NestedTestUserClass(1, "haha", 4d, new int[] {5, 4, 3}));
int pHash = pojoComp.hash(pojoTestRecord);
Tuple1<String> tupleTest = new Tuple1<>("haha");
TupleTypeInfo<Tuple1<String>> tType =
(TupleTypeInfo<Tuple1<String>>) TypeExtractor.getForObject(tupleTest);
TypeComparator<Tuple1<String>> tupleComp =
tType.createComparator(
new int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());
int tHash = tupleComp.hash(tupleTest);
assertThat(tHash)
.isEqualTo(pHash)
.withFailMessage(
"The hashing for tuples and pojos must be the same, so that they are mixable");
Tuple3<Integer, String, Double> multiTupleTest =
new Tuple3<>(1, "haha", 4d); // its important here to use the same values.
TupleTypeInfo<Tuple3<Integer, String, Double>> multiTupleType =
(TupleTypeInfo<Tuple3<Integer, String, Double>>)
TypeExtractor.getForObject(multiTupleTest);
ExpressionKeys fieldKey = new ExpressionKeys(new int[] {1, 0, 2}, multiTupleType);
ExpressionKeys expressKey =
new ExpressionKeys(
new String[] {
"nestedClass.dumm2", "nestedClass.dumm1", "nestedClass.dumm3"
},
pType);
assertThat(fieldKey.areCompatible(expressKey))
.isTrue()
.withFailMessage("Expecting the keys to be compatible");
TypeComparator<TestUserClass> multiPojoComp =
pType.createComparator(
expressKey.computeLogicalKeyPositions(),
new boolean[] {true, true, true},
0,
new ExecutionConfig());
int multiPojoHash = multiPojoComp.hash(pojoTestRecord);
// pojo order is: dumm2 (str), dumm1 (int), dumm3 (double).
TypeComparator<Tuple3<Integer, String, Double>> multiTupleComp =
multiTupleType.createComparator(
fieldKey.computeLogicalKeyPositions(),
new boolean[] {true, true, true},
0,
new ExecutionConfig());
int multiTupleHash = multiTupleComp.hash(multiTupleTest);
assertThat(multiPojoHash)
.isEqualTo(multiTupleHash)
.withFailMessage(
"The hashing for tuples and pojos must be the same, so that they are mixable. Also for those with multiple key fields");
}
}
|
NestedTestUserClass
|
java
|
quarkusio__quarkus
|
extensions/reactive-db2-client/runtime/src/main/java/io/quarkus/reactive/db2/client/runtime/DB2PoolRecorder.java
|
{
"start": 2297,
"end": 13809
}
|
class ____ {
private static final boolean SUPPORTS_CACHE_PREPARED_STATEMENTS = true;
private static final Logger log = Logger.getLogger(DB2PoolRecorder.class);
private static final TypeLiteral<Instance<DB2PoolCreator>> POOL_CREATOR_TYPE_LITERAL = new TypeLiteral<>() {
};
private final RuntimeValue<DataSourcesRuntimeConfig> runtimeConfig;
private final RuntimeValue<DataSourcesReactiveRuntimeConfig> reactiveRuntimeConfig;
private final RuntimeValue<DataSourcesReactiveDB2Config> reactiveDB2RuntimeConfig;
public DB2PoolRecorder(
RuntimeValue<DataSourcesRuntimeConfig> runtimeConfig,
RuntimeValue<DataSourcesReactiveRuntimeConfig> reactiveRuntimeConfig,
RuntimeValue<DataSourcesReactiveDB2Config> reactiveDB2RuntimeConfig) {
this.runtimeConfig = runtimeConfig;
this.reactiveRuntimeConfig = reactiveRuntimeConfig;
this.reactiveDB2RuntimeConfig = reactiveDB2RuntimeConfig;
}
public Supplier<ActiveResult> poolCheckActiveSupplier(String dataSourceName) {
return new Supplier<>() {
@Override
public ActiveResult get() {
Optional<Boolean> active = runtimeConfig.getValue().dataSources().get(dataSourceName).active();
if (active.isPresent() && !active.get()) {
return ActiveResult.inactive(DataSourceUtil.dataSourceInactiveReasonDeactivated(dataSourceName));
}
if (reactiveRuntimeConfig.getValue().dataSources().get(dataSourceName).reactive().url().isEmpty()) {
return ActiveResult.inactive(DataSourceUtil.dataSourceInactiveReasonUrlMissing(dataSourceName,
"reactive.url"));
}
return ActiveResult.active();
}
};
}
public Function<SyntheticCreationalContext<DB2Pool>, DB2Pool> configureDB2Pool(RuntimeValue<Vertx> vertx,
Supplier<Integer> eventLoopCount, String dataSourceName, ShutdownContext shutdown) {
return new Function<>() {
@Override
public DB2Pool apply(SyntheticCreationalContext<DB2Pool> context) {
DB2Pool db2Pool = initialize((VertxInternal) vertx.getValue(),
eventLoopCount.get(),
dataSourceName,
runtimeConfig.getValue().dataSources().get(dataSourceName),
reactiveRuntimeConfig.getValue().dataSources().get(dataSourceName).reactive(),
reactiveDB2RuntimeConfig.getValue().dataSources().get(dataSourceName).reactive().db2(),
context);
shutdown.addShutdownTask(db2Pool::close);
return db2Pool;
}
};
}
public Function<SyntheticCreationalContext<io.vertx.mutiny.db2client.DB2Pool>, io.vertx.mutiny.db2client.DB2Pool> mutinyDB2Pool(
String dataSourceName) {
return new Function<>() {
@SuppressWarnings("unchecked")
@Override
public io.vertx.mutiny.db2client.DB2Pool apply(SyntheticCreationalContext context) {
return io.vertx.mutiny.db2client.DB2Pool.newInstance(
(DB2Pool) context.getInjectedReference(DB2Pool.class, qualifier(dataSourceName)));
}
};
}
private DB2Pool initialize(VertxInternal vertx,
Integer eventLoopCount,
String dataSourceName,
DataSourceRuntimeConfig dataSourceRuntimeConfig,
DataSourceReactiveRuntimeConfig dataSourceReactiveRuntimeConfig,
DataSourceReactiveDB2Config dataSourceReactiveDB2Config,
SyntheticCreationalContext<DB2Pool> context) {
PoolOptions poolOptions = toPoolOptions(eventLoopCount, dataSourceReactiveRuntimeConfig);
DB2ConnectOptions db2ConnectOptions = toConnectOptions(dataSourceName, dataSourceRuntimeConfig,
dataSourceReactiveRuntimeConfig, dataSourceReactiveDB2Config);
Supplier<Future<DB2ConnectOptions>> databasesSupplier = toDatabasesSupplier(List.of(db2ConnectOptions),
dataSourceRuntimeConfig);
return createPool(vertx, poolOptions, db2ConnectOptions, dataSourceName, databasesSupplier, context);
}
private Supplier<Future<DB2ConnectOptions>> toDatabasesSupplier(List<DB2ConnectOptions> db2ConnectOptionsList,
DataSourceRuntimeConfig dataSourceRuntimeConfig) {
Supplier<Future<DB2ConnectOptions>> supplier;
if (dataSourceRuntimeConfig.credentialsProvider().isPresent()) {
String beanName = dataSourceRuntimeConfig.credentialsProviderName().orElse(null);
CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);
String name = dataSourceRuntimeConfig.credentialsProvider().get();
supplier = new ConnectOptionsSupplier<>(credentialsProvider, name, db2ConnectOptionsList,
DB2ConnectOptions::new);
} else {
supplier = Utils.roundRobinSupplier(db2ConnectOptionsList);
}
return supplier;
}
private PoolOptions toPoolOptions(Integer eventLoopCount, DataSourceReactiveRuntimeConfig dataSourceReactiveRuntimeConfig) {
PoolOptions poolOptions;
poolOptions = new PoolOptions();
poolOptions.setMaxSize(dataSourceReactiveRuntimeConfig.maxSize());
if (dataSourceReactiveRuntimeConfig.idleTimeout().isPresent()) {
var idleTimeout = unitised(dataSourceReactiveRuntimeConfig.idleTimeout().get());
poolOptions.setIdleTimeout(idleTimeout.value).setIdleTimeoutUnit(idleTimeout.unit);
}
if (dataSourceReactiveRuntimeConfig.maxLifetime().isPresent()) {
var maxLifetime = unitised(dataSourceReactiveRuntimeConfig.maxLifetime().get());
poolOptions.setMaxLifetime(maxLifetime.value).setMaxLifetimeUnit(maxLifetime.unit);
}
if (dataSourceReactiveRuntimeConfig.shared()) {
poolOptions.setShared(true);
if (dataSourceReactiveRuntimeConfig.name().isPresent()) {
poolOptions.setName(dataSourceReactiveRuntimeConfig.name().get());
}
}
if (dataSourceReactiveRuntimeConfig.eventLoopSize().isPresent()) {
poolOptions.setEventLoopSize(Math.max(0, dataSourceReactiveRuntimeConfig.eventLoopSize().getAsInt()));
} else if (eventLoopCount != null) {
poolOptions.setEventLoopSize(Math.max(0, eventLoopCount));
}
return poolOptions;
}
private DB2ConnectOptions toConnectOptions(String dataSourceName, DataSourceRuntimeConfig dataSourceRuntimeConfig,
DataSourceReactiveRuntimeConfig dataSourceReactiveRuntimeConfig,
DataSourceReactiveDB2Config dataSourceReactiveDB2Config) {
DB2ConnectOptions connectOptions;
if (dataSourceReactiveRuntimeConfig.url().isPresent()) {
List<String> urls = dataSourceReactiveRuntimeConfig.url().get();
if (urls.size() > 1) {
log.warn("The Reactive DB2 client does not support multiple URLs. The first one will be used, and " +
"others will be ignored.");
}
String url = urls.get(0);
// clean up the URL to make migrations easier
if (url.matches("^vertx-reactive:db2://.*$")) {
url = url.substring("vertx-reactive:".length());
}
connectOptions = DB2ConnectOptions.fromUri(url);
} else {
connectOptions = new DB2ConnectOptions();
}
if (dataSourceRuntimeConfig.username().isPresent()) {
connectOptions.setUser(dataSourceRuntimeConfig.username().get());
}
if (dataSourceRuntimeConfig.password().isPresent()) {
connectOptions.setPassword(dataSourceRuntimeConfig.password().get());
}
// credentials provider
if (dataSourceRuntimeConfig.credentialsProvider().isPresent()) {
String beanName = dataSourceRuntimeConfig.credentialsProviderName().orElse(null);
CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);
String name = dataSourceRuntimeConfig.credentialsProvider().get();
Map<String, String> credentials = credentialsProvider.getCredentialsAsync(name).await().indefinitely();
String user = credentials.get(USER_PROPERTY_NAME);
String password = credentials.get(PASSWORD_PROPERTY_NAME);
if (user != null) {
connectOptions.setUser(user);
}
if (password != null) {
connectOptions.setPassword(password);
}
}
connectOptions.setCachePreparedStatements(
dataSourceReactiveRuntimeConfig.cachePreparedStatements().orElse(SUPPORTS_CACHE_PREPARED_STATEMENTS));
connectOptions.setSsl(dataSourceReactiveDB2Config.ssl());
connectOptions.setTrustAll(dataSourceReactiveRuntimeConfig.trustAll());
configurePemTrustOptions(connectOptions, dataSourceReactiveRuntimeConfig.trustCertificatePem());
configureJksTrustOptions(connectOptions, dataSourceReactiveRuntimeConfig.trustCertificateJks());
configurePfxTrustOptions(connectOptions, dataSourceReactiveRuntimeConfig.trustCertificatePfx());
configurePemKeyCertOptions(connectOptions, dataSourceReactiveRuntimeConfig.keyCertificatePem());
configureJksKeyCertOptions(connectOptions, dataSourceReactiveRuntimeConfig.keyCertificateJks());
configurePfxKeyCertOptions(connectOptions, dataSourceReactiveRuntimeConfig.keyCertificatePfx());
connectOptions.setReconnectAttempts(dataSourceReactiveRuntimeConfig.reconnectAttempts());
connectOptions.setReconnectInterval(dataSourceReactiveRuntimeConfig.reconnectInterval().toMillis());
var algo = dataSourceReactiveRuntimeConfig.hostnameVerificationAlgorithm();
if ("NONE".equalsIgnoreCase(algo)) {
connectOptions.setHostnameVerificationAlgorithm("");
} else {
connectOptions.setHostnameVerificationAlgorithm(algo);
}
dataSourceReactiveRuntimeConfig.additionalProperties().forEach(connectOptions::addProperty);
// Use the convention defined by Quarkus Micrometer Vert.x metrics to create metrics prefixed with db2.
// and the client_name as tag.
// See io.quarkus.micrometer.runtime.binder.vertx.VertxMeterBinderAdapter.extractPrefix and
// io.quarkus.micrometer.runtime.binder.vertx.VertxMeterBinderAdapter.extractClientName
connectOptions.setMetricsName("db2|" + dataSourceName);
return connectOptions;
}
private DB2Pool createPool(Vertx vertx, PoolOptions poolOptions, DB2ConnectOptions dB2ConnectOptions,
String dataSourceName, Supplier<Future<DB2ConnectOptions>> databases,
SyntheticCreationalContext<DB2Pool> context) {
Instance<DB2PoolCreator> instance = context.getInjectedReference(POOL_CREATOR_TYPE_LITERAL, qualifier(dataSourceName));
if (instance.isResolvable()) {
DB2PoolCreator.Input input = new DefaultInput(vertx, poolOptions, dB2ConnectOptions);
return (DB2Pool) instance.get().create(input);
}
return (DB2Pool) DB2Driver.INSTANCE.createPool(vertx, databases, poolOptions);
}
private static
|
DB2PoolRecorder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/model/domain/internal/EntityDiscriminatorSqmPath.java
|
{
"start": 1085,
"end": 2756
}
|
class ____<T> extends AbstractSqmPath<T> implements DiscriminatorSqmPath<T> {
private final SqmEntityDomainType entityDomainType;
private final EntityMappingType entityDescriptor;
protected EntityDiscriminatorSqmPath(
NavigablePath navigablePath,
SqmPathSource referencedPathSource,
SqmPath<?> lhs,
SqmEntityDomainType entityDomainType,
EntityMappingType entityDescriptor,
NodeBuilder nodeBuilder) {
super( navigablePath, referencedPathSource, lhs, nodeBuilder );
this.entityDomainType = entityDomainType;
this.entityDescriptor = entityDescriptor;
}
public EntityDomainType getEntityDomainType() {
return entityDomainType;
}
public EntityMappingType getEntityDescriptor() {
return entityDescriptor;
}
@Override
public @NonNull SqmPath<?> getLhs() {
return castNonNull( super.getLhs() );
}
@Override
public @NonNull EntityDiscriminatorSqmPathSource getExpressible() {
// return (EntityDiscriminatorSqmPathSource) getNodeType();
return (EntityDiscriminatorSqmPathSource) getReferencedPathSource();
}
@Override
public EntityDiscriminatorSqmPath copy(SqmCopyContext context) {
final EntityDiscriminatorSqmPath existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
else {
return context.registerCopy(
this,
(EntityDiscriminatorSqmPath) getLhs().copy( context ).type()
);
}
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return entityDescriptor.hasSubclasses()
? walker.visitDiscriminatorPath( this )
: walker.visitEntityTypeLiteralExpression( new SqmLiteralEntityType( entityDomainType, nodeBuilder() ) );
}
}
|
EntityDiscriminatorSqmPath
|
java
|
alibaba__nacos
|
test/core-test/src/test/java/com/alibaba/nacos/test/core/auth/LdapAuthCoreITCase.java
|
{
"start": 3033,
"end": 3199
}
|
class ____ extends LdapBase {
@Test
void testLdapAuth() throws Exception {
super.login("karson", "karson");
}
}
}
|
TlsTest
|
java
|
google__auto
|
factory/src/test/resources/expected/CustomNullableFactory.java
|
{
"start": 871,
"end": 1585
}
|
class ____ {
private final Provider<Object> objectProvider;
@Inject
CustomNullableFactory(Provider<Object> objectProvider) {
this.objectProvider = checkNotNull(objectProvider, 1, 1);
}
CustomNullable create(@CustomNullable.Nullable String string) {
return new CustomNullable(string, objectProvider.get());
}
private static <T> T checkNotNull(T reference, int argumentNumber, int argumentCount) {
if (reference == null) {
throw new NullPointerException(
"@AutoFactory method argument is null but is not marked @Nullable. Argument "
+ argumentNumber
+ " of "
+ argumentCount);
}
return reference;
}
}
|
CustomNullableFactory
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/context/config/NamePropertyDefaultValueDubboConfigBeanCustomizer.java
|
{
"start": 1544,
"end": 3322
}
|
class ____ implements DubboConfigBeanCustomizer {
/**
* The bean name of {@link NamePropertyDefaultValueDubboConfigBeanCustomizer}
*
* @since 2.7.1
*/
public static final String BEAN_NAME = "namePropertyDefaultValueDubboConfigBeanCustomizer";
/**
* The name of property that is "name" maybe is absent in target class
*/
private static final String PROPERTY_NAME = "name";
@Override
public void customize(String beanName, AbstractConfig dubboConfigBean) {
PropertyDescriptor propertyDescriptor = getPropertyDescriptor(dubboConfigBean.getClass(), PROPERTY_NAME);
if (propertyDescriptor != null) { // "name" property is present
Method getNameMethod = propertyDescriptor.getReadMethod();
if (getNameMethod == null) { // if "getName" method is absent
return;
}
Object propertyValue = ReflectionUtils.invokeMethod(getNameMethod, dubboConfigBean);
if (propertyValue != null) { // If The return value of "getName" method is not null
return;
}
Method setNameMethod = propertyDescriptor.getWriteMethod();
if (setNameMethod != null) { // "setName" and "getName" methods are present
if (Arrays.equals(
ObjectUtils.of(String.class), setNameMethod.getParameterTypes())) { // the param type is String
// set bean name to the value of the "name" property
ReflectionUtils.invokeMethod(setNameMethod, dubboConfigBean, beanName);
}
}
}
}
@Override
public int getOrder() {
return HIGHEST_PRECEDENCE;
}
}
|
NamePropertyDefaultValueDubboConfigBeanCustomizer
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/BeanDefinitionLoader.java
|
{
"start": 11158,
"end": 11230
}
|
interface ____ {
Closure<?> getBeans();
}
}
|
GroovyBeanDefinitionSource
|
java
|
apache__camel
|
components/camel-univocity-parsers/src/main/java/org/apache/camel/dataformat/univocity/UniVocityFixedDataFormat.java
|
{
"start": 1416,
"end": 5838
}
|
class ____
extends
AbstractUniVocityDataFormat<FixedWidthFormat, FixedWidthWriterSettings, FixedWidthWriter, FixedWidthParserSettings, FixedWidthParser, UniVocityFixedDataFormat> {
private String fieldLengths;
private Boolean skipTrailingCharsUntilNewline;
private Boolean recordEndsOnNewline;
private Character padding;
public int[] fieldLengthsAsArray() {
if (fieldLengths == null) {
return null;
}
String[] arr = fieldLengths.split(",");
int[] answer = new int[arr.length];
for (int i = 0; i < arr.length; i++) {
answer[i] = Integer.parseInt(arr[i]);
}
return answer;
}
public String getFieldLengths() {
return fieldLengths;
}
public void setFieldLengths(String fieldLengths) {
this.fieldLengths = fieldLengths;
}
public Boolean getSkipTrailingCharsUntilNewline() {
return skipTrailingCharsUntilNewline;
}
public void setSkipTrailingCharsUntilNewline(Boolean skipTrailingCharsUntilNewline) {
this.skipTrailingCharsUntilNewline = skipTrailingCharsUntilNewline;
}
public Boolean getRecordEndsOnNewline() {
return recordEndsOnNewline;
}
public void setRecordEndsOnNewline(Boolean recordEndsOnNewline) {
this.recordEndsOnNewline = recordEndsOnNewline;
}
public Character getPadding() {
return padding;
}
public void setPadding(Character padding) {
this.padding = padding;
}
@Override
protected FixedWidthWriterSettings createWriterSettings() {
return new FixedWidthWriterSettings(createFixedWidthFields());
}
@Override
protected FixedWidthWriter createWriter(Writer writer, FixedWidthWriterSettings settings) {
return new FixedWidthWriter(writer, settings);
}
@Override
protected FixedWidthParserSettings createParserSettings() {
return new FixedWidthParserSettings(createFixedWidthFields());
}
@Override
protected void configureParserSettings(FixedWidthParserSettings settings) {
super.configureParserSettings(settings);
if (skipTrailingCharsUntilNewline != null) {
settings.setSkipTrailingCharsUntilNewline(skipTrailingCharsUntilNewline);
}
if (recordEndsOnNewline != null) {
settings.setRecordEndsOnNewline(true);
}
}
@Override
protected FixedWidthParser createParser(FixedWidthParserSettings settings) {
return new FixedWidthParser(settings);
}
@Override
protected void configureFormat(FixedWidthFormat format) {
super.configureFormat(format);
if (padding != null) {
format.setPadding(padding);
}
}
/**
* Creates the {@link com.univocity.parsers.fixed.FixedWidthFields} instance based on the headers and field lengths.
*
* @return new {@code FixedWidthFields} based on the header and field lengths.
*/
private FixedWidthFields createFixedWidthFields() {
// Ensure that the field lengths have been defined.
if (fieldLengths == null) {
throw new IllegalArgumentException(
"The fieldLengths must have been defined in order to use the fixed-width format.");
}
// If there's no header then we only use their length
if (headers == null || headers.isBlank()) {
return new FixedWidthFields(fieldLengthsAsArray());
}
String[] arr1 = headersAsArray();
int[] arr2 = fieldLengthsAsArray();
// Use both headers and field lengths (same size and no duplicate headers)
if (arr1.length != arr2.length) {
throw new IllegalArgumentException(
"The headers and fieldLengths must have the same number of element in order to use the fixed-width format.");
}
LinkedHashMap<String, Integer> fields = new LinkedHashMap<>();
for (int i = 0; i < arr1.length; i++) {
fields.put(arr1[i], arr2[i]);
}
if (fields.size() != arr1.length) {
throw new IllegalArgumentException("The headers cannot have duplicates in order to use the fixed-width format.");
}
return new FixedWidthFields(fields);
}
@Override
public String getDataFormatName() {
return "univocityFixed";
}
}
|
UniVocityFixedDataFormat
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoRequest.java
|
{
"start": 742,
"end": 1018
}
|
class ____ extends LegacyActionRequest {
public RemoteInfoRequest() {}
RemoteInfoRequest(StreamInput in) throws IOException {
super(in);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}
|
RemoteInfoRequest
|
java
|
google__guice
|
core/test/com/google/inject/internal/MapBinderTest.java
|
{
"start": 23836,
"end": 29309
}
|
class ____ {
int keyPart;
int dataPart;
private ValueType(int keyPart, int dataPart) {
this.keyPart = keyPart;
this.dataPart = dataPart;
}
@Override
public boolean equals(Object obj) {
return (obj instanceof ValueType) && (keyPart == ((ValueType) obj).keyPart);
}
@Override
public int hashCode() {
return keyPart;
}
}
Module m1 =
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, ValueType> multibinder =
MapBinder.newMapBinder(binder(), String.class, ValueType.class);
multibinder.addBinding("a").toInstance(new ValueType(1, 2));
}
};
Module m2 =
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, ValueType> multibinder =
MapBinder.newMapBinder(binder(), String.class, ValueType.class);
multibinder.addBinding("b").toInstance(new ValueType(1, 3));
}
};
Injector injector = Guice.createInjector(m1, m2);
Map<String, ValueType> map = injector.getInstance(new Key<Map<String, ValueType>>() {});
assertEquals(2, map.get("a").dataPart);
assertEquals(3, map.get("b").dataPart);
}
public void testMapBinderMultimap() {
AbstractModule ab1c =
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, String> multibinder =
MapBinder.newMapBinder(binder(), String.class, String.class);
multibinder.addBinding("a").toInstance("A");
multibinder.addBinding("b").toInstance("B1");
multibinder.addBinding("c").toInstance("C");
}
};
AbstractModule b2c =
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, String> multibinder =
MapBinder.newMapBinder(binder(), String.class, String.class);
multibinder.addBinding("b").toInstance("B2");
multibinder.addBinding("c").toInstance("C");
multibinder.permitDuplicates();
}
};
Injector injector = Guice.createInjector(ab1c, b2c);
assertEquals(
mapOf("a", setOf("A"), "b", setOf("B1", "B2"), "c", setOf("C")),
injector.getInstance(Key.get(mapOfSetOfString)));
assertMapVisitor(
Key.get(mapOfString),
stringType,
stringType,
setOf(ab1c, b2c),
BOTH,
true,
0,
instance("a", "A"),
instance("b", "B1"),
instance("b", "B2"),
instance("c", "C"));
}
public void testMapBinderMultimapWithAnotation() {
AbstractModule ab1 =
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, String> multibinder =
MapBinder.newMapBinder(binder(), String.class, String.class, Abc.class);
multibinder.addBinding("a").toInstance("A");
multibinder.addBinding("b").toInstance("B1");
}
};
AbstractModule b2c =
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, String> multibinder =
MapBinder.newMapBinder(binder(), String.class, String.class, Abc.class);
multibinder.addBinding("b").toInstance("B2");
multibinder.addBinding("c").toInstance("C");
multibinder.permitDuplicates();
}
};
Injector injector = Guice.createInjector(ab1, b2c);
assertEquals(
mapOf("a", setOf("A"), "b", setOf("B1", "B2"), "c", setOf("C")),
injector.getInstance(Key.get(mapOfSetOfString, Abc.class)));
try {
injector.getInstance(Key.get(mapOfSetOfString));
fail();
} catch (ConfigurationException expected) {
}
assertMapVisitor(
Key.get(mapOfString, Abc.class),
stringType,
stringType,
setOf(ab1, b2c),
BOTH,
true,
0,
instance("a", "A"),
instance("b", "B1"),
instance("b", "B2"),
instance("c", "C"));
}
public void testMapBinderMultimapIsUnmodifiable() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
MapBinder<String, String> mapBinder =
MapBinder.newMapBinder(binder(), String.class, String.class);
mapBinder.addBinding("a").toInstance("A");
mapBinder.permitDuplicates();
}
});
Map<String, Set<String>> map = injector.getInstance(Key.get(mapOfSetOfString));
try {
map.clear();
fail();
} catch (UnsupportedOperationException expected) {
}
try {
map.get("a").clear();
fail();
} catch (UnsupportedOperationException expected) {
}
}
public void testMapBinderMapForbidsNullKeys() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
MapBinder.newMapBinder(binder(), String.class, String.class).addBinding(null);
}
});
fail();
} catch (CreationException expected) {
}
}
public void testMapBinderMapForbidsNullValues() {
|
ValueType
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/dagger/AndroidInjectionBeforeSuperTest.java
|
{
"start": 6162,
"end": 6924
}
|
class ____ {
public static void inject(Activity activity) {}
public static void inject(Fragment fragment) {}
public static void inject(Service service) {}
}\
""")
.doTest();
}
@Test
public void negativeCase() {
compilationHelper
.addSourceLines(
"AndroidInjectionBeforeSuperNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.inject.dagger.testdata;
import android.app.Activity;
import android.app.Fragment;
import android.app.Service;
import android.content.Intent;
import android.os.Bundle;
import android.os.IBinder;
import dagger.android.AndroidInjection;
final
|
AndroidInjection
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ResourceConditionTests.java
|
{
"start": 2693,
"end": 2893
}
|
class ____ {
@Bean
String foo() {
return "foo";
}
}
@Configuration(proxyBeanMethods = false)
@Conditional(UnknownDefaultLocationResourceCondition.class)
static
|
DefaultLocationConfiguration
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestDefaultUpgradeComponentsFinder.java
|
{
"start": 1396,
"end": 4613
}
|
class ____ {
private UpgradeComponentsFinder.DefaultUpgradeComponentsFinder finder =
new UpgradeComponentsFinder.DefaultUpgradeComponentsFinder();
@Test
public void testServiceArtifactChange() {
Service currentDef = ServiceTestUtils.createExampleApplication();
Service targetDef = ServiceTestUtils.createExampleApplication();
targetDef.getComponents().forEach(x -> x.setArtifact(
TestServiceManager.createTestArtifact("v1")));
assertEquals(targetDef.getComponents(),
finder.findTargetComponentSpecs(currentDef,
targetDef), "all components need upgrade");
}
@Test
public void testServiceUpgradeWithNewComponentAddition() {
Service currentDef = ServiceTestUtils.createExampleApplication();
Service targetDef = ServiceTestUtils.createExampleApplication();
Iterator<Component> targetComponentsIter =
targetDef.getComponents().iterator();
Component firstComponent = targetComponentsIter.next();
firstComponent.setName("newComponentA");
try {
finder.findTargetComponentSpecs(currentDef, targetDef);
fail("Expected error since component does not exist in service "
+ "definition");
} catch (UnsupportedOperationException usoe) {
assertEquals(
"addition/deletion of components not supported by upgrade. Could "
+ "not find component newComponentA in current service "
+ "definition.",
usoe.getMessage());
//Expected
}
}
@Test
public void testComponentArtifactChange() {
Service currentDef = TestServiceManager.createBaseDef("test");
Service targetDef = TestServiceManager.createBaseDef("test");
targetDef.getComponents().get(0).setArtifact(
TestServiceManager.createTestArtifact("v2"));
List<Component> expected = new ArrayList<>();
expected.add(targetDef.getComponents().get(0));
assertEquals(expected, finder.findTargetComponentSpecs(currentDef,
targetDef), "single components needs upgrade");
}
@Test
public void testChangeInConfigFileProperty() {
ConfigFile file = new ConfigFile().srcFile("src").destFile("dest")
.type(ConfigFile.TypeEnum.HADOOP_XML);
Map<String, String> props = new HashMap<>();
props.put("k1", "v1");
file.setProperties(props);
Configuration conf = new Configuration().files(Lists.newArrayList(file));
Service currentDef = TestServiceManager.createBaseDef("test");
currentDef.setConfiguration(conf);
// new spec has changes in config file property
file = new ConfigFile().srcFile("src").destFile("dest")
.type(ConfigFile.TypeEnum.HADOOP_XML);
Map<String, String> changedProps = new HashMap<>();
changedProps.put("k1", "v2");
file.setProperties(changedProps);
conf = new Configuration().files(Lists.newArrayList(file));
Service targetDef = TestServiceManager.createBaseDef("test");
targetDef.setConfiguration(conf);
List<Component> expected = new ArrayList<>();
expected.addAll(targetDef.getComponents());
assertEquals(expected, finder.findTargetComponentSpecs(currentDef, targetDef),
"all components needs upgrade");
}
}
|
TestDefaultUpgradeComponentsFinder
|
java
|
grpc__grpc-java
|
xds/src/generated/thirdparty/grpc/io/envoyproxy/envoy/service/discovery/v3/AggregatedDiscoveryServiceGrpc.java
|
{
"start": 13485,
"end": 15638
}
|
class ____
extends io.grpc.stub.AbstractBlockingStub<AggregatedDiscoveryServiceBlockingV2Stub> {
private AggregatedDiscoveryServiceBlockingV2Stub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected AggregatedDiscoveryServiceBlockingV2Stub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new AggregatedDiscoveryServiceBlockingV2Stub(channel, callOptions);
}
/**
* <pre>
* This is a gRPC-only API.
* </pre>
*/
@io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/10918")
public io.grpc.stub.BlockingClientCall<io.envoyproxy.envoy.service.discovery.v3.DiscoveryRequest, io.envoyproxy.envoy.service.discovery.v3.DiscoveryResponse>
streamAggregatedResources() {
return io.grpc.stub.ClientCalls.blockingBidiStreamingCall(
getChannel(), getStreamAggregatedResourcesMethod(), getCallOptions());
}
/**
*/
@io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/10918")
public io.grpc.stub.BlockingClientCall<io.envoyproxy.envoy.service.discovery.v3.DeltaDiscoveryRequest, io.envoyproxy.envoy.service.discovery.v3.DeltaDiscoveryResponse>
deltaAggregatedResources() {
return io.grpc.stub.ClientCalls.blockingBidiStreamingCall(
getChannel(), getDeltaAggregatedResourcesMethod(), getCallOptions());
}
}
/**
* A stub to allow clients to do limited synchronous rpc calls to service AggregatedDiscoveryService.
* <pre>
* See https://github.com/envoyproxy/envoy-api#apis for a description of the role of
* ADS and how it is intended to be used by a management server. ADS requests
* have the same structure as their singleton xDS counterparts, but can
* multiplex many resource types on a single stream. The type_url in the
* DiscoveryRequest/DiscoveryResponse provides sufficient information to recover
* the multiplexed singleton APIs at the Envoy instance and management server.
* </pre>
*/
public static final
|
AggregatedDiscoveryServiceBlockingV2Stub
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/relation/AbstractCollectionMapper.java
|
{
"start": 1377,
"end": 12102
}
|
class ____<T> extends AbstractPropertyMapper {
protected final Configuration configuration;
protected final CommonCollectionMapperData commonCollectionMapperData;
protected final Class<? extends T> collectionClass;
protected final boolean ordinalInId;
protected final boolean revisionTypeInId;
private final Constructor<? extends T> proxyConstructor;
protected AbstractCollectionMapper(
Configuration configuration,
CommonCollectionMapperData commonCollectionMapperData,
Class<? extends T> collectionClass,
Class<? extends T> proxyClass,
boolean ordinalInId,
boolean revisionTypeInId) {
this.configuration = configuration;
this.commonCollectionMapperData = commonCollectionMapperData;
this.collectionClass = collectionClass;
this.ordinalInId = ordinalInId;
this.revisionTypeInId = revisionTypeInId;
try {
proxyConstructor = proxyClass.getConstructor( Initializor.class );
}
catch (NoSuchMethodException e) {
throw new AuditException( e );
}
}
protected abstract Collection getNewCollectionContent(PersistentCollection newCollection);
protected abstract Collection getOldCollectionContent(Serializable oldCollection);
protected abstract Set<Object> buildCollectionChangeSet(Object eventCollection, Collection collection);
/**
* Maps the changed collection element to the given map.
*
* @param idData Map to which composite-id data should be added.
* @param data Where to map the data.
* @param changed The changed collection element to map.
*/
protected abstract void mapToMapFromObject(
SharedSessionContractImplementor session,
Map<String, Object> idData,
Map<String, Object> data,
Object changed);
/**
* Creates map for storing identifier data. Ordinal parameter guarantees uniqueness of primary key.
* Composite primary key cannot contain embeddable properties since they might be nullable.
*
* @param ordinal Iteration ordinal.
*
* @return Map for holding identifier data.
*/
protected Map<String, Object> createIdMap(int ordinal) {
final Map<String, Object> idMap = new HashMap<>();
if ( ordinalInId ) {
idMap.put( configuration.getEmbeddableSetOrdinalPropertyName(), ordinal );
}
return idMap;
}
protected void addCollectionChanges(
SharedSessionContractImplementor session,
List<PersistentCollectionChangeData> collectionChanges,
Set<Object> changed,
RevisionType revisionType,
Object id) {
int ordinal = 0;
for ( Object changedObj : changed ) {
final Map<String, Object> entityData = new HashMap<>();
final Map<String, Object> originalId = createIdMap( ordinal++ );
entityData.put( configuration.getOriginalIdPropertyName(), originalId );
collectionChanges.add(
new PersistentCollectionChangeData(
commonCollectionMapperData.getVersionsMiddleEntityName(), entityData, changedObj
)
);
// Mapping the collection owner's id.
commonCollectionMapperData.getReferencingIdData().getPrefixedMapper().mapToMapFromId( originalId, id );
// Mapping collection element and index (if present).
mapToMapFromObject( session, originalId, entityData, changedObj );
final Map<String, Object> dataMap = revisionTypeInId ? originalId : entityData;
dataMap.put( configuration.getRevisionTypePropertyName(), revisionType );
}
}
@Override
@SuppressWarnings("unchecked")
public List<PersistentCollectionChangeData> mapCollectionChanges(
SharedSessionContractImplementor session,
String referencingPropertyName,
PersistentCollection newColl,
Serializable oldColl,
Object id) {
final PropertyData propertyData = commonCollectionMapperData.getCollectionReferencingPropertyData();
if ( !propertyData.getName().equals( referencingPropertyName ) ) {
return null;
}
return mapCollectionChanges( session, newColl, oldColl, id );
}
@Override
public boolean mapToMapFromEntity(
SharedSessionContractImplementor session,
Map<String, Object> data,
Object newObj,
Object oldObj) {
// Changes are mapped in the "mapCollectionChanges" method.
return false;
}
@Override
public void mapModifiedFlagsToMapFromEntity(
SharedSessionContractImplementor session,
Map<String, Object> data,
Object newObj,
Object oldObj) {
final PropertyData propertyData = commonCollectionMapperData.getCollectionReferencingPropertyData();
if ( propertyData.isUsingModifiedFlag() ) {
if ( isNotPersistentCollection( newObj ) || isNotPersistentCollection( oldObj ) ) {
// Compare POJOs.
data.put( propertyData.getModifiedFlagPropertyName(), !Objects.deepEquals( newObj, oldObj ) );
}
else if ( isFromNullToEmptyOrFromEmptyToNull( (PersistentCollection) newObj, (Serializable) oldObj ) ) {
data.put( propertyData.getModifiedFlagPropertyName(), true );
}
else {
// HHH-7949 - Performance optimization to avoid lazy-fetching collections that have
// not been changed for deriving the modified flags value.
final PersistentCollection pc = (PersistentCollection) newObj;
if ( ( pc != null && !pc.isDirty() ) || ( newObj == null && oldObj == null ) ) {
data.put( propertyData.getModifiedFlagPropertyName(), false );
return;
}
final List<PersistentCollectionChangeData> changes = mapCollectionChanges(
session,
commonCollectionMapperData.getCollectionReferencingPropertyData().getName(),
pc,
(Serializable) oldObj,
null
);
data.put( propertyData.getModifiedFlagPropertyName(), !changes.isEmpty() );
}
}
}
private boolean isNotPersistentCollection(Object obj) {
return obj != null && !(obj instanceof PersistentCollection);
}
private boolean isFromNullToEmptyOrFromEmptyToNull(PersistentCollection newColl, Serializable oldColl) {
// Comparing new and old collection content.
final Collection newCollection = getNewCollectionContent( newColl );
final Collection oldCollection = getOldCollectionContent( oldColl );
return oldCollection == null && newCollection != null && newCollection.isEmpty()
|| newCollection == null && oldCollection != null && oldCollection.isEmpty();
}
@Override
public void mapModifiedFlagsToMapForCollectionChange(String collectionPropertyName, Map<String, Object> data) {
final PropertyData propertyData = commonCollectionMapperData.getCollectionReferencingPropertyData();
if ( propertyData.isUsingModifiedFlag() ) {
data.put(
propertyData.getModifiedFlagPropertyName(),
propertyData.getName().equals( collectionPropertyName )
);
}
}
protected abstract Initializor<T> getInitializor(
EnversService enversService,
AuditReaderImplementor versionsReader,
Object primaryKey,
Number revision,
boolean removed);
protected CollectionPersister resolveCollectionPersister(
SharedSessionContractImplementor session,
PersistentCollection collection) {
// First attempt to resolve the persister from the collection entry
if ( collection != null ) {
CollectionEntry collectionEntry = session.getPersistenceContextInternal().getCollectionEntry( collection );
if ( collectionEntry != null ) {
CollectionPersister collectionPersister = collectionEntry.getCurrentPersister();
if ( collectionPersister != null ) {
return collectionPersister;
}
}
}
// Fallback to resolving the persister from the collection role
final CollectionPersister collectionPersister = session.getFactory()
.getMappingMetamodel()
.getCollectionDescriptor( commonCollectionMapperData.getRole() );
if ( collectionPersister == null ) {
throw new AuditException(
String.format(
Locale.ROOT,
"Failed to locate CollectionPersister for collection [%s]",
commonCollectionMapperData.getRole()
)
);
}
return collectionPersister;
}
/**
* Checks whether the old collection element and new collection element are the same.
* By default, this delegates to the collection persister's {@link CollectionPersister#getElementType()}.
*
* @param collectionPersister The collection persister.
* @param oldObject The collection element from the old persistent collection.
* @param newObject The collection element from the new persistent collection.
*
* @return true if the two objects are the same, false otherwise.
*/
protected boolean isSame(CollectionPersister collectionPersister, Object oldObject, Object newObject) {
return collectionPersister.getElementType().isSame( oldObject, newObject );
}
@Override
public void mapToEntityFromMap(
final EnversService enversService,
final Object obj,
final Map data,
final Object primaryKey,
final AuditReaderImplementor versionsReader,
final Number revision) {
final Object collectionProxy = mapToEntityFromMap( enversService, data, primaryKey, versionsReader, revision );
final PropertyData collectionPropertyData = commonCollectionMapperData.getCollectionReferencingPropertyData();
if ( isDynamicComponentMap() ) {
final Map<String, Object> map = (Map<String, Object>) obj;
map.put( collectionPropertyData.getBeanName(), collectionProxy );
}
else {
setValueOnObject( collectionPropertyData, obj, collectionProxy, enversService.getServiceRegistry() );
}
}
@Override
public Object mapToEntityFromMap(
EnversService enversService,
Map data,
Object primaryKey,
AuditReaderImplementor versionsReader,
Number revision) {
final String revisionTypePropertyName = enversService.getConfig().getRevisionTypePropertyName();
// construct the collection proxy
final Object collectionProxy;
try {
collectionProxy = proxyConstructor.newInstance(
getInitializor(
enversService,
versionsReader,
primaryKey,
revision,
RevisionType.DEL.equals( data.get( revisionTypePropertyName ) )
)
);
}
catch ( Exception e ) {
throw new AuditException( "Failed to construct collection proxy", e );
}
return collectionProxy;
}
/**
* Map collection changes using hash identity.
*
* @param session The session.
* @param newColl The new persistent collection.
* @param oldColl The old collection.
* @param id The owning entity identifier.
* @return the persistent collection changes.
*/
protected abstract List<PersistentCollectionChangeData> mapCollectionChanges(
SharedSessionContractImplementor session,
PersistentCollection newColl,
Serializable oldColl,
Object id);
@Override
public boolean hasPropertiesWithModifiedFlag() {
if ( commonCollectionMapperData != null ) {
final PropertyData propertyData = commonCollectionMapperData.getCollectionReferencingPropertyData();
return propertyData != null && propertyData.isUsingModifiedFlag();
}
return false;
}
}
|
AbstractCollectionMapper
|
java
|
elastic__elasticsearch
|
modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/MultiPolygonBuilder.java
|
{
"start": 1264,
"end": 7141
}
|
class ____ extends ShapeBuilder<Shape, MultiPolygon, MultiPolygonBuilder> {
public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON;
private final List<PolygonBuilder> polygons = new ArrayList<>();
private final Orientation orientation;
/**
* Build a MultiPolygonBuilder with RIGHT orientation.
*/
public MultiPolygonBuilder() {
this(Orientation.RIGHT);
}
/**
* Build a MultiPolygonBuilder with an arbitrary orientation.
*/
public MultiPolygonBuilder(Orientation orientation) {
this.orientation = orientation;
}
/**
* Read from a stream.
*/
public MultiPolygonBuilder(StreamInput in) throws IOException {
orientation = Orientation.readFrom(in);
int holes = in.readVInt();
for (int i = 0; i < holes; i++) {
polygon(new PolygonBuilder(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
orientation.writeTo(out);
out.writeCollection(polygons);
}
public Orientation orientation() {
return this.orientation;
}
/**
* Add a shallow copy of the polygon to the multipolygon. This will apply the orientation of the
* {@link MultiPolygonBuilder} to the polygon if polygon has different orientation.
*/
public MultiPolygonBuilder polygon(PolygonBuilder polygon) {
PolygonBuilder pb = new PolygonBuilder(new CoordinatesBuilder().coordinates(polygon.shell().coordinates(false)), this.orientation);
for (LineStringBuilder hole : polygon.holes()) {
pb.hole(hole);
}
this.polygons.add(pb);
return this;
}
/**
* get the list of polygons
*/
public List<PolygonBuilder> polygons() {
return polygons;
}
private static String polygonCoordinatesToWKT(PolygonBuilder polygon) {
StringBuilder sb = new StringBuilder();
sb.append(GeoWKTParser.LPAREN);
sb.append(ShapeBuilder.coordinateListToWKT(polygon.shell().coordinates));
for (LineStringBuilder hole : polygon.holes()) {
sb.append(GeoWKTParser.COMMA);
sb.append(ShapeBuilder.coordinateListToWKT(hole.coordinates));
}
sb.append(GeoWKTParser.RPAREN);
return sb.toString();
}
@Override
protected StringBuilder contentToWKT() {
final StringBuilder sb = new StringBuilder();
if (polygons.isEmpty()) {
sb.append(GeoWKTParser.EMPTY);
} else {
sb.append(GeoWKTParser.LPAREN);
if (polygons.size() > 0) {
sb.append(polygonCoordinatesToWKT(polygons.get(0)));
}
for (int i = 1; i < polygons.size(); ++i) {
sb.append(GeoWKTParser.COMMA);
sb.append(polygonCoordinatesToWKT(polygons.get(i)));
}
sb.append(GeoWKTParser.RPAREN);
}
return sb;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
builder.field(ShapeParser.FIELD_ORIENTATION.getPreferredName(), orientation.name().toLowerCase(Locale.ROOT));
builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName());
for (PolygonBuilder polygon : polygons) {
builder.startArray();
polygon.coordinatesArray(builder, params);
builder.endArray();
}
builder.endArray();
return builder.endObject();
}
@Override
public GeoShapeType type() {
return TYPE;
}
@Override
public int numDimensions() {
if (polygons == null || polygons.isEmpty()) {
throw new IllegalStateException("unable to get number of dimensions, " + "Polygons have not yet been initialized");
}
return polygons.get(0).numDimensions();
}
@Override
public Shape buildS4J() {
List<Shape> shapes = new ArrayList<>(this.polygons.size());
if (wrapdateline) {
for (PolygonBuilder polygon : this.polygons) {
for (Coordinate[][] part : polygon.coordinates()) {
shapes.add(jtsGeometry(PolygonBuilder.polygonS4J(FACTORY, part)));
}
}
} else {
for (PolygonBuilder polygon : this.polygons) {
shapes.add(jtsGeometry(polygon.toPolygonS4J(FACTORY)));
}
}
if (shapes.size() == 1) return shapes.get(0);
else return new XShapeCollection<>(shapes, SPATIAL_CONTEXT);
// note: ShapeCollection is probably faster than a Multi* geom.
}
@SuppressWarnings({ "unchecked" })
@Override
public MultiPolygon buildGeometry() {
List<Polygon> shapes = new ArrayList<>(this.polygons.size());
Object poly;
for (PolygonBuilder polygon : this.polygons) {
poly = polygon.buildGeometry();
if (poly instanceof List) {
shapes.addAll((List<Polygon>) poly);
} else {
shapes.add((Polygon) poly);
}
}
if (shapes.isEmpty()) {
return MultiPolygon.EMPTY;
}
return new MultiPolygon(shapes);
}
@Override
public int hashCode() {
return Objects.hash(polygons, orientation);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
MultiPolygonBuilder other = (MultiPolygonBuilder) obj;
return Objects.equals(polygons, other.polygons) && Objects.equals(orientation, other.orientation);
}
}
|
MultiPolygonBuilder
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/servlet/setup/MockMvcConfigurer.java
|
{
"start": 1082,
"end": 1556
}
|
interface ____ be plugged in via
* {@link ConfigurableMockMvcBuilder#apply} with instances of this type likely
* created via static methods, for example:
*
* <pre class="code">
* import static org.example.ExampleSetup.mySetup;
*
* // ...
*
* MockMvcBuilders.webAppContextSetup(context).apply(mySetup("foo","bar")).build();
* </pre>
*
* @author Rossen Stoyanchev
* @since 4.1
* @see org.springframework.test.web.servlet.setup.MockMvcConfigurerAdapter
*/
public
|
can
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/util/DruidDataSourceUtils.java
|
{
"start": 1335,
"end": 15871
}
|
class ____ {
private static final Log LOG = LogFactory.getLog(DruidDataSourceUtils.class);
public static String getUrl(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getUrl();
}
try {
Method method = druidDataSource.getClass().getMethod("getUrl");
Object obj = method.invoke(druidDataSource);
return (String) obj;
} catch (Exception e) {
LOG.error("getUrl error", e);
return null;
}
}
public static long getID(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getID();
}
try {
Method method = druidDataSource.getClass().getMethod("getID");
Object obj = method.invoke(druidDataSource);
return (Long) obj;
} catch (Exception e) {
LOG.error("getID error", e);
return -1;
}
}
public static String getName(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getName();
}
try {
Method method = druidDataSource.getClass().getMethod("getName");
Object obj = method.invoke(druidDataSource);
return (String) obj;
} catch (Exception e) {
LOG.error("getUrl error", e);
return null;
}
}
public static ObjectName getObjectName(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getObjectName();
}
try {
Method method = druidDataSource.getClass().getMethod("getObjectName");
Object obj = method.invoke(druidDataSource);
return (ObjectName) obj;
} catch (Exception e) {
LOG.error("getObjectName error", e);
return null;
}
}
public static Object getSqlStat(Object druidDataSource, int sqlId) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getSqlStat(sqlId);
}
try {
Method method = druidDataSource.getClass().getMethod("getSqlStat", int.class);
return method.invoke(druidDataSource, sqlId);
} catch (Exception e) {
LOG.error("getSqlStat error", e);
return null;
}
}
public static boolean isRemoveAbandoned(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).isRemoveAbandoned();
}
try {
Method method = druidDataSource.getClass().getMethod("isRemoveAbandoned");
Object obj = method.invoke(druidDataSource);
return (Boolean) obj;
} catch (Exception e) {
LOG.error("isRemoveAbandoned error", e);
return false;
}
}
@SuppressWarnings("unchecked")
public static Map<String, Object> getStatDataForMBean(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getStatDataForMBean();
}
try {
Method method = druidDataSource.getClass().getMethod("getStatDataForMBean");
Object obj = method.invoke(druidDataSource);
return (Map<String, Object>) obj;
} catch (Exception e) {
LOG.error("getStatDataForMBean error", e);
return null;
}
}
@SuppressWarnings("unchecked")
public static Map<String, Object> getStatData(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getStatData();
}
try {
Method method = druidDataSource.getClass().getMethod("getStatData");
Object obj = method.invoke(druidDataSource);
return (Map<String, Object>) obj;
} catch (Exception e) {
LOG.error("getStatData error", e);
return null;
}
}
@SuppressWarnings("rawtypes")
public static Map getSqlStatMap(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getSqlStatMap();
}
try {
Method method = druidDataSource.getClass().getMethod("getSqlStatMap");
Object obj = method.invoke(druidDataSource);
return (Map) obj;
} catch (Exception e) {
LOG.error("getSqlStatMap error", e);
return null;
}
}
@SuppressWarnings({"rawtypes", "unchecked"})
public static Map<String, Object> getWallStatMap(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getWallStatMap();
}
try {
Method method = druidDataSource.getClass().getMethod("getWallStatMap");
Object obj = method.invoke(druidDataSource);
return (Map) obj;
} catch (Exception e) {
LOG.error("getWallStatMap error", e);
return null;
}
}
@SuppressWarnings("unchecked")
public static List<Map<String, Object>> getPoolingConnectionInfo(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getPoolingConnectionInfo();
}
try {
Method method = druidDataSource.getClass().getMethod("getPoolingConnectionInfo");
Object obj = method.invoke(druidDataSource);
return (List<Map<String, Object>>) obj;
} catch (Exception e) {
LOG.error("getPoolingConnectionInfo error", e);
return null;
}
}
@SuppressWarnings("unchecked")
public static List<String> getActiveConnectionStackTrace(Object druidDataSource) {
if (druidDataSource.getClass() == DruidDataSource.class) {
return ((DruidDataSource) druidDataSource).getActiveConnectionStackTrace();
}
try {
Method method = druidDataSource.getClass().getMethod("getActiveConnectionStackTrace");
Object obj = method.invoke(druidDataSource);
return (List<String>) obj;
} catch (Exception e) {
LOG.error("getActiveConnectionStackTrace error", e);
return null;
}
}
public static Integer getPropertyInt(Properties properties, String key) {
String value = properties.getProperty(key);
if (value != null) {
try {
return Integer.parseInt(value);
} catch (NumberFormatException ignored) {
// ignore
}
}
return null;
}
public static void configFromProperties(DruidDataSource druidDataSource, Properties properties) {
trySetStringProperty(properties, "druid.name", druidDataSource::setName);
trySetStringProperty(properties, "druid.url", druidDataSource::setUrl);
trySetStringProperty(properties, "druid.username", druidDataSource::setUsername);
trySetStringProperty(properties, "druid.password", druidDataSource::setPassword);
trySetBooleanProperty(properties, "druid.testWhileIdle", druidDataSource::setTestWhileIdle);
trySetBooleanProperty(properties, "druid.testOnBorrow", druidDataSource::setTestOnBorrow);
{
String property = properties.getProperty("druid.validationQuery");
if (property != null && property.length() > 0) {
druidDataSource.setValidationQuery(property);
}
}
trySetBooleanProperty(properties, "druid.useGlobalDataSourceStat", druidDataSource::setUseGlobalDataSourceStat);
trySetBooleanProperty(properties, "druid.useGloalDataSourceStat", druidDataSource::setUseGlobalDataSourceStat);
trySetBooleanProperty(properties, "druid.asyncInit", druidDataSource::setAsyncInit);
{
String property = properties.getProperty("druid.filters");
if (property != null && property.length() > 0) {
try {
druidDataSource.setFilters(property);
} catch (SQLException e) {
LOG.error("setFilters error", e);
}
}
}
trySetLongProperty(properties, Constants.DRUID_TIME_BETWEEN_LOG_STATS_MILLIS, druidDataSource::setTimeBetweenLogStatsMillis);
{
String property = properties.getProperty(Constants.DRUID_STAT_SQL_MAX_SIZE);
if (property != null && property.length() > 0) {
try {
int value = Integer.parseInt(property);
if (druidDataSource.getDataSourceStat() != null) {
druidDataSource.getDataSourceStat().setMaxSqlSize(value);
}
} catch (NumberFormatException e) {
LOG.error("illegal property '" + Constants.DRUID_STAT_SQL_MAX_SIZE + "'", e);
}
}
}
trySetBooleanProperty(properties, "druid.clearFiltersEnable", druidDataSource::setClearFiltersEnable);
trySetBooleanProperty(properties, "druid.resetStatEnable", druidDataSource::setResetStatEnable);
trySetIntProperty(properties, "druid.notFullTimeoutRetryCount", druidDataSource::setNotFullTimeoutRetryCount);
trySetLongProperty(properties, "druid.timeBetweenEvictionRunsMillis", druidDataSource::setTimeBetweenEvictionRunsMillis);
trySetIntProperty(properties, "druid.maxWaitThreadCount", druidDataSource::setMaxWaitThreadCount);
trySetIntProperty(properties, "druid.maxWait", druidDataSource::setMaxWait);
trySetBooleanProperty(properties, "druid.failFast", druidDataSource::setFailFast);
trySetLongProperty(properties, "druid.phyTimeoutMillis", druidDataSource::setPhyTimeoutMillis);
trySetLongProperty(properties, "druid.phyMaxUseCount", druidDataSource::setPhyMaxUseCount);
trySetLongProperty(properties, "druid.minEvictableIdleTimeMillis", druidDataSource::setMinEvictableIdleTimeMillis);
trySetLongProperty(properties, "druid.maxEvictableIdleTimeMillis", druidDataSource::setMaxEvictableIdleTimeMillis);
trySetBooleanProperty(properties, "druid.keepAlive", druidDataSource::setKeepAlive);
trySetLongProperty(properties, "druid.keepAliveBetweenTimeMillis", druidDataSource::setKeepAliveBetweenTimeMillis);
trySetBooleanProperty(properties, "druid.poolPreparedStatements", druidDataSource::setPoolPreparedStatements);
trySetBooleanProperty(properties, "druid.initVariants", druidDataSource::setInitVariants);
trySetBooleanProperty(properties, "druid.initGlobalVariants", druidDataSource::setInitGlobalVariants);
trySetBooleanProperty(properties, "druid.useUnfairLock", druidDataSource::setUseUnfairLock);
trySetStringProperty(properties, "druid.driverClassName", druidDataSource::setDriverClassName);
trySetIntProperty(properties, "druid.initialSize", druidDataSource::setInitialSize);
trySetIntProperty(properties, "druid.minIdle", druidDataSource::setMinIdle);
trySetIntProperty(properties, "druid.maxActive", druidDataSource::setMaxActive);
trySetBooleanProperty(properties, "druid.killWhenSocketReadTimeout", druidDataSource::setKillWhenSocketReadTimeout);
trySetStringProperty(properties, "druid.connectProperties", druidDataSource::setConnectionProperties);
trySetIntProperty(properties, "druid.maxPoolPreparedStatementPerConnectionSize",
druidDataSource::setMaxPoolPreparedStatementPerConnectionSize);
{
String property = properties.getProperty("druid.initConnectionSqls");
if (property != null && property.length() > 0) {
try {
StringTokenizer tokenizer = new StringTokenizer(property, ";");
druidDataSource.setConnectionInitSqls(Collections.list(tokenizer));
} catch (NumberFormatException e) {
LOG.error("illegal property 'druid.initConnectionSqls'", e);
}
}
}
{
String property = System.getProperty("druid.load.spifilter.skip");
if (property != null && !"false".equals(property)) {
druidDataSource.setLoadSpifilterSkip(true);
}
}
{
String property = System.getProperty("druid.checkExecuteTime");
if (property != null && !"false".equals(property)) {
druidDataSource.setCheckExecuteTime(true);
}
}
// new added
trySetIntProperty(properties, "druid.connectionErrorRetryAttempts", druidDataSource::setConnectionErrorRetryAttempts);
trySetLongProperty(properties, "druid.timeBetweenConnectErrorMillis", druidDataSource::setTimeBetweenConnectErrorMillis);
trySetBooleanProperty(properties, "druid.breakAfterAcquireFailure", druidDataSource::setBreakAfterAcquireFailure);
trySetBooleanProperty(properties, "druid.testOnReturn", druidDataSource::setTestOnReturn);
trySetBooleanProperty(properties, "druid.removeAbandoned", druidDataSource::setRemoveAbandoned);
trySetBooleanProperty(properties, "druid.logAbandoned", druidDataSource::setLogAbandoned);
trySetLongProperty(properties, "druid.removeAbandonedTimeoutMillis", druidDataSource::setRemoveAbandonedTimeoutMillis);
trySetIntProperty(properties, "druid.validationQueryTimeout", druidDataSource::setValidationQueryTimeout);
trySetIntProperty(properties, "druid.queryTimeout", druidDataSource::setQueryTimeout);
trySetIntProperty(properties, "druid.connectTimeout", druidDataSource::setConnectTimeout);
trySetIntProperty(properties, "druid.socketTimeout", druidDataSource::setSocketTimeout);
trySetIntProperty(properties, "druid.transactionQueryTimeout", druidDataSource::setTransactionQueryTimeout);
trySetIntProperty(properties, "druid.loginTimeout", druidDataSource::setLoginTimeout);
}
}
|
DruidDataSourceUtils
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
|
{
"start": 1824,
"end": 5039
}
|
class ____ extends AcknowledgedTransportMasterNodeAction<PutIndexTemplateRequest> {
public static final ActionType<AcknowledgedResponse> TYPE = new ActionType<>("indices:admin/template/put");
private static final Logger logger = LogManager.getLogger(TransportPutIndexTemplateAction.class);
private final MetadataIndexTemplateService indexTemplateService;
private final IndexScopedSettings indexScopedSettings;
private final ProjectResolver projectResolver;
@Inject
public TransportPutIndexTemplateAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
MetadataIndexTemplateService indexTemplateService,
ActionFilters actionFilters,
ProjectResolver projectResolver,
IndexScopedSettings indexScopedSettings
) {
super(
TYPE.name(),
transportService,
clusterService,
threadPool,
actionFilters,
PutIndexTemplateRequest::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.indexTemplateService = indexTemplateService;
this.indexScopedSettings = indexScopedSettings;
this.projectResolver = projectResolver;
}
@Override
protected ClusterBlockException checkBlock(PutIndexTemplateRequest request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
@Override
protected void masterOperation(
Task task,
final PutIndexTemplateRequest request,
final ClusterState state,
final ActionListener<AcknowledgedResponse> listener
) throws IOException {
String cause = request.cause();
if (cause.length() == 0) {
cause = "api";
}
final Settings.Builder templateSettingsBuilder = Settings.builder();
templateSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetadata.INDEX_SETTING_PREFIX);
indexScopedSettings.validate(templateSettingsBuilder.build(), true); // templates must be consistent with regards to dependencies
indexTemplateService.putTemplate(
projectResolver.getProjectId(),
new MetadataIndexTemplateService.PutRequest(cause, request.name()).patterns(request.patterns())
.order(request.order())
.settings(templateSettingsBuilder.build())
.mappings(request.mappings() == null ? null : new CompressedXContent(request.mappings()))
.aliases(request.aliases())
.create(request.create())
.version(request.version()),
request.masterNodeTimeout(),
new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse response) {
listener.onResponse(response);
}
@Override
public void onFailure(Exception e) {
logger.debug(() -> "failed to put template [" + request.name() + "]", e);
listener.onFailure(e);
}
}
);
}
}
|
TransportPutIndexTemplateAction
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/util/InstantiatorTests.java
|
{
"start": 1381,
"end": 5835
}
|
class ____ {
private final ParamA paramA = new ParamA();
private final ParamB paramB = new ParamB();
private @Nullable ParamC paramC;
@Test
void instantiateWhenOnlyDefaultConstructorCreatesInstance() {
WithDefaultConstructor instance = createInstance(WithDefaultConstructor.class);
assertThat(instance).isInstanceOf(WithDefaultConstructor.class);
}
@Test
void instantiateWhenMultipleConstructorPicksMostArguments() {
WithMultipleConstructors instance = createInstance(WithMultipleConstructors.class);
assertThat(instance).isInstanceOf(WithMultipleConstructors.class);
}
@Test
void instantiateWhenAdditionalConstructorPicksMostSuitable() {
WithAdditionalConstructor instance = createInstance(WithAdditionalConstructor.class);
assertThat(instance).isInstanceOf(WithAdditionalConstructor.class);
}
@Test
void instantiateOrdersInstances() {
List<Object> instances = createInstantiator(Object.class).instantiate(
Arrays.asList(WithMultipleConstructors.class.getName(), WithAdditionalConstructor.class.getName()));
assertThat(instances).hasSize(2);
assertThat(instances.get(0)).isInstanceOf(WithAdditionalConstructor.class);
assertThat(instances.get(1)).isInstanceOf(WithMultipleConstructors.class);
}
@Test
void instantiateWithFactory() {
assertThat(this.paramC).isNull();
WithFactory instance = createInstance(WithFactory.class);
assertThat(instance.getParamC()).isEqualTo(this.paramC);
}
@Test
void instantiateTypesCreatesInstance() {
WithDefaultConstructor instance = createInstantiator(WithDefaultConstructor.class)
.instantiateTypes(Collections.singleton(WithDefaultConstructor.class))
.get(0);
assertThat(instance).isInstanceOf(WithDefaultConstructor.class);
}
@Test
void instantiateWithClassLoaderCreatesInstance() {
OverridingClassLoader classLoader = new OverridingClassLoader(getClass().getClassLoader()) {
@Override
protected boolean isEligibleForOverriding(String className) {
return super.isEligibleForOverriding(className)
&& className.equals(WithDefaultConstructorSubclass.class.getName());
}
};
WithDefaultConstructor instance = createInstantiator(WithDefaultConstructor.class)
.instantiate(classLoader, Collections.singleton(WithDefaultConstructorSubclass.class.getName()))
.get(0);
assertThat(instance.getClass().getClassLoader()).isSameAs(classLoader);
}
@Test
void createWhenWrongTypeThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> createInstantiator(WithDefaultConstructor.class)
.instantiate(Collections.singleton(WithAdditionalConstructor.class.getName())))
.withMessageContaining("Unable to instantiate");
}
@Test
void createWithFailureHandlerInvokesFailureHandler() {
assertThatIllegalStateException()
.isThrownBy(() -> new Instantiator<>(WithDefaultConstructor.class, (availableParameters) -> {
}, new CustomFailureHandler())
.instantiate(Collections.singleton(WithAdditionalConstructor.class.getName())))
.withMessageContaining("custom failure handler message");
}
@Test
void instantiateWithSingleNameCreatesInstance() {
WithDefaultConstructor instance = createInstantiator(WithDefaultConstructor.class)
.instantiate(WithDefaultConstructor.class.getName());
assertThat(instance).isInstanceOf(WithDefaultConstructor.class);
}
@Test
void getArgReturnsArg() {
Instantiator<?> instantiator = createInstantiator(WithMultipleConstructors.class);
assertThat(instantiator.getArg(ParamA.class)).isSameAs(this.paramA);
assertThat(instantiator.getArg(ParamB.class)).isSameAs(this.paramB);
assertThat(instantiator.getArg(ParamC.class)).isInstanceOf(ParamC.class);
}
@Test
void getArgWhenUnknownThrowsException() {
Instantiator<?> instantiator = createInstantiator(WithMultipleConstructors.class);
assertThatIllegalStateException().isThrownBy(() -> instantiator.getArg(InputStream.class))
.withMessageStartingWith("Unknown argument type");
}
private <T> T createInstance(Class<T> type) {
T instance = createInstantiator(type).instantiate(type.getName());
assertThat(instance).isNotNull();
return instance;
}
private <T> Instantiator<T> createInstantiator(Class<T> type) {
return new Instantiator<>(type, (availableParameters) -> {
availableParameters.add(ParamA.class, this.paramA);
availableParameters.add(ParamB.class, this.paramB);
availableParameters.add(ParamC.class, ParamC::new);
});
}
static
|
InstantiatorTests
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/db2/DB2SelectTest_29.java
|
{
"start": 1100,
"end": 3099
}
|
class ____ extends DB2Test {
public void test_0() throws Exception {
String sql = "select to_char(current date - 7 day ,'yyyymmdd') from sysibm.dual";
DB2StatementParser parser = new DB2StatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals("SELECT to_char(CURRENT DATE - 7 DAYS, 'yyyymmdd')\n" +
"FROM sysibm.dual", stmt.getSelect().toString());
assertEquals("sysibm.dual", stmt.getSelect().getQueryBlock().getFrom().toString());
assertEquals(1, statementList.size());
DB2SchemaStatVisitor visitor = new DB2SchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("sysibm.dual")));
// assertTrue(visitor.getColumns().contains(new Column("DSN8B10.EMP", "WORKDEPT")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "first_name")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "full_name")));
assertEquals("SELECT to_char(CURRENT DATE - 7 DAYS, 'yyyymmdd')\n" +
"FROM sysibm.dual", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2));
assertEquals("select to_char(CURRENT DATE - 7 days, 'yyyymmdd')\n" +
"from sysibm.dual", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
}
}
|
DB2SelectTest_29
|
java
|
spring-projects__spring-boot
|
module/spring-boot-batch-jdbc/src/test/java/org/springframework/boot/batch/jdbc/autoconfigure/BatchJdbcAutoConfigurationTests.java
|
{
"start": 31201,
"end": 31347
}
|
class ____ extends DefaultBatchConfiguration {
}
@EnableBatchProcessing
@Configuration(proxyBeanMethods = false)
static
|
CustomBatchConfiguration
|
java
|
junit-team__junit5
|
junit-vintage-engine/src/test/java/org/junit/vintage/engine/JUnit4ParameterizedTests.java
|
{
"start": 2433,
"end": 2703
}
|
class ____ implements TestExecutionListener {
@Override
public void executionFinished(TestIdentifier identifier, TestExecutionResult result) {
if (identifier.isTest()) {
callCounts.merge(result.getStatus(), 1, Integer::sum);
}
}
}
}
|
StatusTrackingListener
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/SpelCompilationCoverageTests.java
|
{
"start": 253595,
"end": 253695
}
|
class ____ {
double four = 0.04d;
public double getFour() {
return four;
}
}
public
|
Three
|
java
|
bumptech__glide
|
instrumentation/src/androidTest/java/com/bumptech/glide/LoadVideoResourceTest.java
|
{
"start": 974,
"end": 9018
}
|
class ____ {
@Rule public final TearDownGlide tearDownGlide = new TearDownGlide();
private final ConcurrencyHelper concurrency = new ConcurrencyHelper();
private Context context;
@Before
public void setUp() throws IOException {
MockitoAnnotations.initMocks(this);
context = ApplicationProvider.getApplicationContext();
}
@Test
public void loadVideoResourceId_fromInt_decodesFrame() {
Drawable frame = concurrency.get(Glide.with(context).load(ResourceIds.raw.video).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceId_fromInt_withFrameTime_decodesFrame() {
Drawable frame =
concurrency.get(
GlideApp.with(context)
.load(ResourceIds.raw.video)
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
// Testing boxed integer.
@SuppressWarnings("UnnecessaryBoxing")
@Test
public void loadVideoResourceId_fromInteger_decodesFrame() {
Drawable frame =
concurrency.get(Glide.with(context).load(new Integer(ResourceIds.raw.video)).submit());
assertThat(frame).isNotNull();
}
// Testing boxed integer.
@SuppressWarnings("UnnecessaryBoxing")
@Test
public void loadVideoResourceId_fromInteger_withFrameTime_decodesFrame() {
Drawable frame =
concurrency.get(
GlideApp.with(context)
.load(new Integer(ResourceIds.raw.video))
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceId_asBitmap_decodesFrame() {
Bitmap frame =
concurrency.get(Glide.with(context).asBitmap().load(ResourceIds.raw.video).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceId_asBitmap_withFrameTime_decodesFrame() {
Bitmap frame =
concurrency.get(
GlideApp.with(context)
.asBitmap()
.load(ResourceIds.raw.video)
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUri_fromId_decodesFrame() {
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(context.getPackageName())
.path(String.valueOf(ResourceIds.raw.video))
.build();
Drawable frame = concurrency.get(GlideApp.with(context).load(uri).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUri_asBitmap_fromId_decodesFrame() {
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(context.getPackageName())
.path(String.valueOf(ResourceIds.raw.video))
.build();
Bitmap frame = concurrency.get(GlideApp.with(context).asBitmap().load(uri).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUri_fromId_withFrame_decodesFrame() {
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(context.getPackageName())
.path(String.valueOf(ResourceIds.raw.video))
.build();
Bitmap frame =
concurrency.get(
GlideApp.with(context)
.asBitmap()
.load(uri)
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUriString_fromId_decodesFrame() {
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(context.getPackageName())
.path(String.valueOf(ResourceIds.raw.video))
.build();
Bitmap frame = concurrency.get(GlideApp.with(context).asBitmap().load(uri.toString()).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUriString_fromId_withFrame_decodesFrame() {
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(context.getPackageName())
.path(String.valueOf(ResourceIds.raw.video))
.build();
Bitmap frame =
concurrency.get(
GlideApp.with(context)
.asBitmap()
.load(uri.toString())
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUri_fromName_decodesFrame() {
Resources resources = context.getResources();
int resourceId = ResourceIds.raw.video;
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(resources.getResourcePackageName(resourceId))
.appendPath(resources.getResourceTypeName(resourceId))
.appendPath(resources.getResourceEntryName(resourceId))
.build();
Drawable frame = concurrency.get(GlideApp.with(context).load(uri).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUri_asBitmap_fromName_decodesFrame() {
Resources resources = context.getResources();
int resourceId = ResourceIds.raw.video;
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(resources.getResourcePackageName(resourceId))
.appendPath(resources.getResourceTypeName(resourceId))
.appendPath(resources.getResourceEntryName(resourceId))
.build();
Bitmap frame = concurrency.get(GlideApp.with(context).asBitmap().load(uri).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUri_fromName_withFrame_decodesFrame() {
Resources resources = context.getResources();
int resourceId = ResourceIds.raw.video;
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(resources.getResourcePackageName(resourceId))
.appendPath(resources.getResourceTypeName(resourceId))
.appendPath(resources.getResourceEntryName(resourceId))
.build();
Bitmap frame =
concurrency.get(
GlideApp.with(context)
.asBitmap()
.load(uri)
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUriString_fromName_decodesFrame() {
Resources resources = context.getResources();
int resourceId = ResourceIds.raw.video;
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(resources.getResourcePackageName(resourceId))
.appendPath(resources.getResourceTypeName(resourceId))
.appendPath(resources.getResourceEntryName(resourceId))
.build();
Bitmap frame = concurrency.get(GlideApp.with(context).asBitmap().load(uri.toString()).submit());
assertThat(frame).isNotNull();
}
@Test
public void loadVideoResourceUriString_fromName_withFrame_decodesFrame() {
Resources resources = context.getResources();
int resourceId = ResourceIds.raw.video;
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(resources.getResourcePackageName(resourceId))
.appendPath(resources.getResourceTypeName(resourceId))
.appendPath(resources.getResourceEntryName(resourceId))
.build();
Bitmap frame =
concurrency.get(
GlideApp.with(context)
.asBitmap()
.load(uri.toString())
.frame(TimeUnit.SECONDS.toMicros(1))
.submit());
assertThat(frame).isNotNull();
}
}
|
LoadVideoResourceTest
|
java
|
apache__flink
|
flink-test-utils-parent/flink-connector-test-utils/src/main/java/org/apache/flink/connector/testframe/external/DefaultContainerizedExternalSystem.java
|
{
"start": 2415,
"end": 3495
}
|
class ____<C extends GenericContainer<C>> {
private C container;
private GenericContainer<?> flinkContainer;
public <T extends GenericContainer<T>> Builder<T> fromContainer(T container) {
@SuppressWarnings("unchecked")
Builder<T> self = (Builder<T>) this;
self.container = container;
return self;
}
public Builder<C> bindWithFlinkContainer(GenericContainer<?> flinkContainer) {
this.flinkContainer = flinkContainer;
container.dependsOn(flinkContainer).withNetwork(flinkContainer.getNetwork());
return this;
}
public DefaultContainerizedExternalSystem<C> build() {
if (flinkContainer == null) {
LOG.warn(
"External system container is not bound with Flink container. "
+ "This might lead to network isolation between external system and Flink");
}
return new DefaultContainerizedExternalSystem<>(container);
}
}
}
|
Builder
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-interceptors/src/main/java/io/quarkus/grpc/examples/interceptors/EarlyHeaderServerInterceptor.java
|
{
"start": 370,
"end": 1252
}
|
class ____ implements ServerInterceptor {
private static final Metadata.Key<String> HEADER = Metadata.Key.of("xx-acme-header", Metadata.ASCII_STRING_MARSHALLER);
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers,
ServerCallHandler<ReqT, RespT> next) {
ServerCallDiscardingHeaders<ReqT, RespT> wrappedServerCall = new ServerCallDiscardingHeaders<>(call);
ServerCall.Listener<ReqT> serverCallListener = next.startCall(wrappedServerCall, headers);
if (wrappedServerCall.isClosed()) {
return new ServerCall.Listener<>() {
};
}
Metadata metadata = new Metadata();
metadata.put(HEADER, "whatever");
call.sendHeaders(metadata);
return serverCallListener;
}
private static
|
EarlyHeaderServerInterceptor
|
java
|
apache__camel
|
components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientComponent.java
|
{
"start": 1389,
"end": 2918
}
|
class ____ extends AbstractIecComponent<ClientConnectionMultiplexor, ClientOptions> {
public ClientComponent(final CamelContext context) {
super(ClientOptions.class, new ClientOptions(), context);
}
public ClientComponent() {
super(ClientOptions.class, new ClientOptions());
}
@Override
protected void applyDataModuleOptions(final ClientOptions options, final Map<String, Object> parameters) {
if (parameters.get(Constants.PARAM_DATA_MODULE_OPTIONS) instanceof DataModuleOptions) {
options.setDataModuleOptions((DataModuleOptions) parameters.get(Constants.PARAM_DATA_MODULE_OPTIONS));
}
}
@Override
protected Endpoint createEndpoint(
final String uri, final ClientConnectionMultiplexor connection, final ObjectAddress address) {
return new ClientEndpoint(uri, this, connection, address);
}
@Override
protected ClientConnectionMultiplexor createConnection(final ConnectionId id, final ClientOptions options) {
return new ClientConnectionMultiplexor(new ClientConnection(id.getHost(), id.getPort(), options));
}
/**
* Default connection options
*/
@Metadata
@Override
public void setDefaultConnectionOptions(final ClientOptions defaultConnectionOptions) {
super.setDefaultConnectionOptions(defaultConnectionOptions);
}
@Override
public ClientOptions getDefaultConnectionOptions() {
return super.getDefaultConnectionOptions();
}
}
|
ClientComponent
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/test/java/org/hibernate/spatial/testing/RequiresFunction.java
|
{
"start": 823,
"end": 946
}
|
interface ____ {
/**
* The key for the function (as used in the SqmFunctionRegistry)
*/
String key();
}
|
RequiresFunction
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/datavalidation/groups/FinalValidation.java
|
{
"start": 718,
"end": 786
}
|
interface ____ extends Default {} // <1>
//end::clazz[]
|
FinalValidation
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/DefaultJSONParserTest2.java
|
{
"start": 704,
"end": 2533
}
|
class ____ {
}
public void test_0() throws Exception {
String text = "{}";
Map map = (Map) JSON.parse(text);
Assert.assertEquals(0, map.size());
}
public void test_1() throws Exception {
JSONException error = null;
try {
String text = "{}a";
Map map = (Map) JSON.parse(text);
Assert.assertEquals(0, map.size());
} catch (JSONException e) {
error = e;
}
Assert.assertNotNull(error);
}
public void test_2() throws Exception {
JSONException error = null;
try {
DefaultJSONParser parser = new DefaultJSONParser("{'a'3}");
parser.config(Feature.AllowSingleQuotes, true);
parser.parse();
} catch (JSONException e) {
error = e;
}
Assert.assertNotNull(error);
}
public void test_3() throws Exception {
JSONException error = null;
try {
DefaultJSONParser parser = new DefaultJSONParser("{a 3}");
parser.config(Feature.AllowUnQuotedFieldNames, true);
parser.parse();
} catch (JSONException e) {
error = e;
}
Assert.assertNotNull(error);
}
public void test_4() throws Exception {
JSONException error = null;
try {
DefaultJSONParser parser = new DefaultJSONParser("{");
parser.config(Feature.AllowUnQuotedFieldNames, true);
parser.parse();
} catch (JSONException e) {
error = e;
}
Assert.assertNotNull(error);
}
public void test_5() throws Exception {
DefaultJSONParser parser = new DefaultJSONParser("{}");
Map map = parser.parseObject();
Assert.assertEquals(0, map.size());
}
}
|
Entity
|
java
|
quarkusio__quarkus
|
independent-projects/tools/devtools-testing/src/test/resources/__snapshots__/SpringWebCodestartTest/testContent/src_test_java_ilove_quark_us_GreetingControllerIT.java
|
{
"start": 110,
"end": 223
}
|
class ____ extends GreetingControllerTest {
// Execute the same tests but in native mode.
}
|
GreetingControllerIT
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/util/AutoPopulatingListTests.java
|
{
"start": 3082,
"end": 3306
}
|
class ____ implements AutoPopulatingList.ElementFactory<Object> {
@Override
public Object createElement(int index) {
TestObject bean = new TestObject();
bean.setAge(index);
return bean;
}
}
}
|
MockElementFactory
|
java
|
apache__maven
|
compat/maven-model-builder/src/main/java/org/apache/maven/model/locator/DefaultModelLocator.java
|
{
"start": 1128,
"end": 1308
}
|
class ____ implements ModelLocator {
@Override
public File locatePom(File projectDirectory) {
return new File(projectDirectory, "pom.xml");
}
}
|
DefaultModelLocator
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ClassModifierShouldBe.java
|
{
"start": 1024,
"end": 1325
}
|
class ____ extends BasicErrorMessageFactory {
private static final String PACKAGE_PRIVATE = "package-private";
private ClassModifierShouldBe(Class<?> actual, boolean positive, String modifier) {
super("%nExpecting actual:%n %s%n" + (positive ? "to" : "not to") + " be a %s
|
ClassModifierShouldBe
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/basic/InterceptedMethodInvokerTest.java
|
{
"start": 2490,
"end": 2706
}
|
class ____ {
public static int counter = 0;
@MyInterceptorBinding
public String hello(int param1, List<String> param2) {
return "foobar" + param1 + param2;
}
}
}
|
MyService
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
|
{
"start": 14774,
"end": 21400
}
|
class ____ implements BeforeEachCallback, AfterEachCallback {
private YarnConfiguration conf;
private SliderFileSystem fs;
private java.nio.file.Path serviceBasePath;
@Override
public void afterEach(ExtensionContext context) throws Exception {
delete(context);
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
conf = new YarnConfiguration();
delete(context);
serviceBasePath = Paths.get("target",
getClassName(context), getMethodName(context));
conf.set(YARN_SERVICE_BASE_PATH, serviceBasePath.toString());
try {
Files.createDirectories(serviceBasePath);
fs = new SliderFileSystem(conf);
fs.setAppDir(new Path(serviceBasePath.toString()));
} catch (IOException e) {
Throwables.throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
private void delete(ExtensionContext context) {
FileUtils.deleteQuietly(Paths.get("target", getClassName(context)).toFile());
}
private String getClassName(ExtensionContext context) {
Class<?> requiredTestClass = context.getRequiredTestClass();
return requiredTestClass.getName();
}
private String getMethodName(ExtensionContext context) {
return context.getTestMethod().get().getName();
}
/**
* Returns the yarn conf.
*/
public YarnConfiguration getConf() {
return conf;
}
/**
* Returns the file system.
*/
public SliderFileSystem getFs() {
return fs;
}
/**
* Returns the test service base path.
*/
public java.nio.file.Path getServiceBasePath() {
return serviceBasePath;
}
}
/**
* Wait until all the containers for all components become ready state.
*
* @param client
* @param exampleApp
* @return all ready containers of a service.
* @throws TimeoutException
* @throws InterruptedException
*/
protected Multimap<String, String> waitForAllCompToBeReady(ServiceClient
client, Service exampleApp) throws TimeoutException,
InterruptedException {
int expectedTotalContainers = countTotalContainers(exampleApp);
Multimap<String, String> allContainers = HashMultimap.create();
GenericTestUtils.waitFor(() -> {
try {
Service retrievedApp = client.getStatus(exampleApp.getName());
int totalReadyContainers = 0;
allContainers.clear();
LOG.info("Num Components " + retrievedApp.getComponents().size());
for (Component component : retrievedApp.getComponents()) {
LOG.info("looking for " + component.getName());
LOG.info(component.toString());
if (component.getContainers() != null) {
if (component.getContainers().size() == exampleApp
.getComponent(component.getName()).getNumberOfContainers()) {
for (Container container : component.getContainers()) {
LOG.info("Container state {}, component {}.",
container.getState(), component.getName());
if (container.getState() == ContainerState.READY) {
totalReadyContainers++;
allContainers.put(component.getName(), container.getId());
LOG.info("Found 1 ready container {}.", container.getId());
}
}
} else {
LOG.info(component.getName() + " Expected number of containers "
+ exampleApp.getComponent(component.getName())
.getNumberOfContainers() + ", current = " + component
.getContainers());
}
}
}
LOG.info("Exit loop, totalReadyContainers= " + totalReadyContainers
+ " expected = " + expectedTotalContainers);
return totalReadyContainers == expectedTotalContainers;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}, 2000, 200000);
return allContainers;
}
/**
* Wait until service state becomes stable. A service is stable when all
* requested containers of all components are running and in ready state.
*
* @param client
* @param exampleApp
* @throws TimeoutException
* @throws InterruptedException
*/
protected void waitForServiceToBeStable(ServiceClient client,
Service exampleApp) throws TimeoutException, InterruptedException {
waitForServiceToBeStable(client, exampleApp, 200000);
}
protected void waitForServiceToBeStable(ServiceClient client,
Service exampleApp, int waitForMillis)
throws TimeoutException, InterruptedException {
waitForServiceToBeInState(client, exampleApp, ServiceState.STABLE,
waitForMillis);
}
/**
* Wait until service is started. It does not have to reach a stable state.
*
* @param client
* @param exampleApp
* @throws TimeoutException
* @throws InterruptedException
*/
protected void waitForServiceToBeStarted(ServiceClient client,
Service exampleApp) throws TimeoutException, InterruptedException {
waitForServiceToBeInState(client, exampleApp, ServiceState.STARTED);
}
protected void waitForServiceToBeExpressUpgrading(ServiceClient client,
Service exampleApp) throws TimeoutException, InterruptedException {
waitForServiceToBeInState(client, exampleApp,
ServiceState.EXPRESS_UPGRADING);
}
protected void waitForServiceToBeInState(ServiceClient client,
Service exampleApp, ServiceState desiredState) throws TimeoutException,
InterruptedException {
waitForServiceToBeInState(client, exampleApp, desiredState, 200000);
}
/**
* Wait until service is started. It does not have to reach a stable state.
*
* @param client
* @param exampleApp
* @throws TimeoutException
* @throws InterruptedException
*/
protected void waitForServiceToBeInState(ServiceClient client,
Service exampleApp, ServiceState desiredState, int waitForMillis) throws
TimeoutException, InterruptedException {
GenericTestUtils.waitFor(() -> {
try {
Service retrievedApp = client.getStatus(exampleApp.getName());
return retrievedApp.getState() == desiredState;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}, 2000, waitForMillis);
}
private int countTotalContainers(Service service) {
int totalContainers = 0;
for (Component component : service.getComponents()) {
totalContainers += component.getNumberOfContainers();
}
return totalContainers;
}
}
|
ServiceFSWatcher
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/registration/RetryingRegistrationTest.java
|
{
"start": 18933,
"end": 19006
}
|
enum ____ {
REJECTED;
}
}
static
|
RejectionReason
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java
|
{
"start": 6082,
"end": 6279
}
|
class ____ {
void afterStoreApp(RMStateStore store, ApplicationId appId) {}
void afterStoreAppAttempt(RMStateStore store, ApplicationAttemptId
appAttId) {}
}
|
StoreStateVerifier
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/ClassesBaseTest.java
|
{
"start": 1186,
"end": 1398
}
|
class ____ {
public String publicField;
public String publicField2;
protected String protectedField;
@SuppressWarnings("unused")
private String privateField;
}
protected static
|
AnnotatedClass
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
|
{
"start": 58779,
"end": 59304
}
|
class ____ extends MetadataTask {
private final Set<String> inferenceEntityIds;
DeleteModelMetadataTask(ProjectId projectId, Set<String> inferenceEntityId, ActionListener<AcknowledgedResponse> listener) {
super(projectId, listener);
this.inferenceEntityIds = inferenceEntityId;
}
@Override
ModelRegistryMetadata executeTask(ModelRegistryMetadata current) {
return current.withRemovedModel(inferenceEntityIds);
}
}
}
|
DeleteModelMetadataTask
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/bytes/BytesReferenceStreamInput.java
|
{
"start": 1031,
"end": 10077
}
|
class ____ extends StreamInput {
private static final ByteBuffer EMPTY = ByteBuffer.wrap(new byte[0]);
protected final BytesReference bytesReference;
private BytesRefIterator iterator;
private ByteBuffer slice;
private int totalOffset; // the offset on the stream at which the current slice starts
private int mark = 0;
BytesReferenceStreamInput(BytesReference bytesReference) throws IOException {
this.bytesReference = bytesReference;
this.iterator = bytesReference.iterator();
this.slice = convertToByteBuffer(iterator.next());
this.totalOffset = 0;
}
static ByteBuffer convertToByteBuffer(BytesRef bytesRef) {
if (bytesRef == null) {
return EMPTY;
}
// slice here forces the buffer to have a sliced view, keeping track of the original offset
return ByteBuffer.wrap(bytesRef.bytes, bytesRef.offset, bytesRef.length).slice();
}
@Override
public byte readByte() throws IOException {
maybeNextSlice();
return slice.get();
}
@Override
public short readShort() throws IOException {
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.remaining() >= 2) {
return slice.getShort();
} else {
// slow path
return super.readShort();
}
}
@Override
public int readInt() throws IOException {
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.remaining() >= 4) {
return slice.getInt();
} else {
// slow path
return super.readInt();
}
}
@Override
public long readLong() throws IOException {
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.remaining() >= 8) {
return slice.getLong();
} else {
// slow path
return super.readLong();
}
}
@Override
public String readString() throws IOException {
final int chars = readArraySize();
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.hasArray()) {
// attempt reading bytes directly into a string to minimize copying
final String string = tryReadStringFromBytes(
slice.array(),
slice.position() + slice.arrayOffset(),
slice.limit() + slice.arrayOffset(),
chars
);
if (string != null) {
return string;
}
}
return doReadString(chars);
}
@Override
public int readVInt() throws IOException {
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.remaining() >= 5) {
return ByteBufferStreamInput.readVInt(slice);
}
return super.readVInt();
}
@Override
public long readVLong() throws IOException {
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.remaining() >= 10) {
return ByteBufferStreamInput.readVLong(slice);
} else {
return super.readVLong();
}
}
protected int offset() {
return totalOffset + slice.position();
}
private void maybeNextSlice() throws IOException {
if (slice.hasRemaining() == false) {
// moveToNextSlice is intentionally extracted to another method since it's the assumed cold-path
moveToNextSlice();
}
}
private void moveToNextSlice() throws IOException {
totalOffset += slice.limit();
BytesRef bytesRef = iterator.next();
while (bytesRef != null && bytesRef.length == 0) {
// rare corner case of a bytes reference that has a 0-length component
bytesRef = iterator.next();
}
if (bytesRef == null) {
throw new EOFException();
}
slice = convertToByteBuffer(bytesRef);
assert slice.position() == 0;
}
@Override
public void readBytes(byte[] b, int bOffset, int len) throws IOException {
Objects.checkFromIndexSize(offset(), len, bytesReference.length());
final int bytesRead = read(b, bOffset, len);
assert bytesRead == len : bytesRead + " vs " + len;
}
@Override
public int read() throws IOException {
if (offset() >= bytesReference.length()) {
return -1;
}
return Byte.toUnsignedInt(readByte());
}
@Override
public int read(final byte[] b, final int bOffset, final int len) throws IOException {
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
if (slice.remaining() >= len) {
slice.get(b, bOffset, len);
return len;
}
return readFromMultipleSlices(b, bOffset, len);
}
private int readFromMultipleSlices(byte[] b, int bOffset, int len) throws IOException {
final int length = bytesReference.length();
final int offset = offset();
if (offset >= length) {
return -1;
}
final int numBytesToCopy = Math.min(len, length - offset);
int remaining = numBytesToCopy; // copy the full length or the remaining part
int destOffset = bOffset;
while (remaining > 0) {
maybeNextSlice();
final int currentLen = Math.min(remaining, slice.remaining());
assert currentLen > 0 : "length has to be > 0 to make progress but was: " + currentLen;
slice.get(b, destOffset, currentLen);
destOffset += currentLen;
remaining -= currentLen;
assert remaining >= 0 : "remaining: " + remaining;
}
return numBytesToCopy;
}
@Override
public void close() {
// do nothing
}
@Override
public int available() {
return bytesReference.length() - offset();
}
@Override
protected void ensureCanReadBytes(int bytesToRead) throws EOFException {
int bytesAvailable = bytesReference.length() - offset();
if (bytesAvailable < bytesToRead) {
throwEOF(bytesToRead, bytesAvailable);
}
}
@Override
public long skip(long n) throws IOException {
if (n <= 0L) {
return 0L;
}
if (n <= slice.remaining()) {
slice.position(slice.position() + (int) n);
return n;
}
return skipMultiple(n);
}
private int skipMultiple(long n) throws IOException {
assert offset() <= bytesReference.length() : offset() + " vs " + bytesReference.length();
// definitely >= 0 and <= Integer.MAX_VALUE so casting is ok
final int numBytesSkipped = (int) Math.min(n, bytesReference.length() - offset());
int remaining = numBytesSkipped;
while (remaining > 0) {
maybeNextSlice();
// cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
final ByteBuffer slice = this.slice;
int currentLen = Math.min(remaining, slice.remaining());
remaining -= currentLen;
slice.position(slice.position() + currentLen);
assert remaining >= 0 : "remaining: " + remaining;
}
return numBytesSkipped;
}
@Override
public void reset() throws IOException {
if (totalOffset <= mark) {
slice.position(mark - totalOffset);
} else {
iterator = bytesReference.iterator();
slice = convertToByteBuffer(iterator.next());
totalOffset = 0;
final long skipped = skip(mark);
assert skipped == mark : skipped + " vs " + mark;
}
}
@Override
public BytesReference readSlicedBytesReference() throws IOException {
int len = readVInt();
int pos = offset();
if (len != skip(len)) {
throw new EOFException();
}
return bytesReference.slice(pos, len);
}
@Override
public boolean markSupported() {
return true;
}
@Override
public void mark(int readLimit) {
// We ignore readLimit since the data is all in-memory and therefore we can reset the mark no matter how far we advance.
this.mark = offset();
}
}
|
BytesReferenceStreamInput
|
java
|
apache__camel
|
components/camel-ibm/camel-ibm-watson-speech-to-text/src/test/java/org/apache/camel/component/ibm/watson/stt/integration/WatsonSpeechToTextTestSupport.java
|
{
"start": 1005,
"end": 1899
}
|
class ____ extends CamelTestSupport {
protected static String apiKey;
protected static String serviceUrl;
static {
apiKey = System.getProperty("camel.ibm.watson.stt.apiKey");
serviceUrl = System.getProperty("camel.ibm.watson.stt.serviceUrl");
}
protected String buildEndpointUri() {
return buildEndpointUri(null);
}
protected String buildEndpointUri(String operation) {
StringBuilder uri = new StringBuilder("ibm-watson-speech-to-text://default");
uri.append("?apiKey=RAW(").append(apiKey).append(")");
if (serviceUrl != null && !serviceUrl.isEmpty()) {
uri.append("&serviceUrl=").append(serviceUrl);
}
if (operation != null && !operation.isEmpty()) {
uri.append("&operation=").append(operation);
}
return uri.toString();
}
}
|
WatsonSpeechToTextTestSupport
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/testing/repro8446/Repro8446Test.java
|
{
"start": 267,
"end": 443
}
|
class ____ {
@TestTemplate
@ExtendWith(GreeterExtension.class)
public void test(Greeter greeter) {
assertEquals("hello", greeter.hello());
}
}
|
Repro8446Test
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.