language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/catalog/ExtensionCatalogImpl.java | {
"start": 2879,
"end": 7172
} | class ____ extends ExtensionOriginImpl.Builder implements ExtensionCatalog.Mutable {
private String quarkusCoreVersion;
private String upstreamQuarkusCoreVersion;
private List<ExtensionOrigin> derivedFrom;
private List<Extension> extensions;
private List<Category> categories;
public Builder() {
}
Builder(ExtensionCatalog source) {
super(source);
this.quarkusCoreVersion = source.getQuarkusCoreVersion();
this.upstreamQuarkusCoreVersion = source.getUpstreamQuarkusCoreVersion();
this.derivedFrom = JsonBuilder.modifiableListOrNull(source.getDerivedFrom());
this.extensions = JsonBuilder.modifiableListOrNull(source.getExtensions());
this.categories = JsonBuilder.modifiableListOrNull(source.getCategories());
}
@Override
public Builder setId(String id) {
super.setId(id);
return this;
}
@Override
public Builder setPlatform(boolean platform) {
super.setPlatform(platform);
return this;
}
@Override
public Builder setBom(ArtifactCoords bom) {
super.setBom(bom);
return this;
}
@Override
public String getQuarkusCoreVersion() {
return quarkusCoreVersion;
}
public Builder setQuarkusCoreVersion(String quarkusCoreVersion) {
this.quarkusCoreVersion = quarkusCoreVersion;
return this;
}
@Override
public String getUpstreamQuarkusCoreVersion() {
return upstreamQuarkusCoreVersion;
}
@Override
public Builder setUpstreamQuarkusCoreVersion(String upstreamQuarkusCoreVersion) {
this.upstreamQuarkusCoreVersion = upstreamQuarkusCoreVersion;
return this;
}
@Override
public List<ExtensionOrigin> getDerivedFrom() {
return derivedFrom == null ? Collections.emptyList() : derivedFrom;
}
@JsonDeserialize(contentAs = ExtensionOriginImpl.Builder.class)
public Builder setDerivedFrom(List<ExtensionOrigin> origins) {
this.derivedFrom = JsonBuilder.modifiableListOrNull(origins);
return this;
}
@Override
public List<Extension> getExtensions() {
return extensions == null ? Collections.emptyList() : extensions;
}
@JsonDeserialize(contentAs = ExtensionImpl.Builder.class)
public Builder setExtensions(List<Extension> extensions) {
this.extensions = JsonBuilder.modifiableListOrNull(extensions);
return this;
}
public Builder addExtension(Extension e) {
if (extensions == null) {
extensions = new ArrayList<>();
}
extensions.add(e);
return this;
}
@Override
public List<Category> getCategories() {
return categories == null ? Collections.emptyList() : categories;
}
@JsonDeserialize(contentAs = CategoryImpl.Builder.class)
public Builder setCategories(List<Category> categories) {
this.categories = JsonBuilder.modifiableListOrNull(categories);
return this;
}
public Builder addCategory(Category c) {
if (categories == null) {
categories = new ArrayList<>();
}
categories.add(c);
return this;
}
public Builder setMetadata(Map<String, Object> newValues) {
super.setMetadata(newValues);
return this;
}
@JsonIgnore
public Builder setMetadata(String key, Object value) {
super.setMetadata(key, value);
return this;
}
public Builder removeMetadata(String key) {
super.removeMetadata(key);
return this;
}
@Override
public ExtensionCatalogImpl build() {
List<Extension> built = JsonBuilder.buildersToUnmodifiableList(this.extensions);
return new ExtensionCatalogImpl(this, built);
}
}
// Note: hashcode, equals, and toString from ExtensionOrigin
}
| Builder |
java | quarkusio__quarkus | extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/ConfigMapConfigSourceUtil.java | {
"start": 371,
"end": 1432
} | class ____ extends AbstractKubernetesConfigSourceUtil {
@Override
String getType() {
return "ConfigMap";
}
@Override
OrdinalData ordinalData() {
return OrdinalData.CONFIG_MAP;
}
@Override
ConfigSource createLiteralDataConfigSource(String kubernetesConfigSourceName, Map<String, String> propertyMap,
int ordinal) {
return new ConfigMapLiteralDataPropertiesConfigSource(kubernetesConfigSourceName, propertyMap, ordinal);
}
@Override
ConfigSource createPropertiesConfigSource(String kubernetesConfigSourceName, String fileName, String input, int ordinal) {
return new ConfigMapStringInputPropertiesConfigSource(kubernetesConfigSourceName, fileName, input, ordinal);
}
@Override
ConfigSource createYamlConfigSource(String kubernetesConfigSourceName, String fileName, String input, int ordinal) {
return new ConfigMapStringInputYamlConfigSource(kubernetesConfigSourceName, fileName, input, ordinal);
}
private static final | ConfigMapConfigSourceUtil |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/completable/CompletableUnsafeTest.java | {
"start": 949,
"end": 2788
} | class ____ extends RxJavaTest {
@Test(expected = IllegalArgumentException.class)
public void unsafeCreateRejectsCompletable() {
Completable.unsafeCreate(Completable.complete());
}
@Test
public void wrapAlreadyCompletable() {
assertSame(Completable.complete(), Completable.wrap(Completable.complete()));
}
@Test
public void wrapCustomCompletable() {
Completable.wrap(new CompletableSource() {
@Override
public void subscribe(CompletableObserver observer) {
observer.onSubscribe(Disposable.empty());
observer.onComplete();
}
})
.test()
.assertResult();
}
@Test(expected = NullPointerException.class)
public void unsafeCreateThrowsNPE() {
Completable.unsafeCreate(new CompletableSource() {
@Override
public void subscribe(CompletableObserver observer) {
throw new NullPointerException();
}
}).test();
}
@Test
public void unsafeCreateThrowsIAE() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Completable.unsafeCreate(new CompletableSource() {
@Override
public void subscribe(CompletableObserver observer) {
throw new IllegalArgumentException();
}
}).test();
fail("Should have thrown!");
} catch (NullPointerException ex) {
if (!(ex.getCause() instanceof IllegalArgumentException)) {
fail(ex.toString() + ": should have thrown NPA(IAE)");
}
TestHelper.assertError(errors, 0, IllegalArgumentException.class);
} finally {
RxJavaPlugins.reset();
}
}
}
| CompletableUnsafeTest |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/dynamic/PlainComponent.java | {
"start": 224,
"end": 3317
} | class ____ {
private String componentNote;
private List<ManyToManyEntity> manyToManyList = new ArrayList<ManyToManyEntity>();
private OneToOneEntity oneToOneEntity;
private ManyToOneEntity manyToOneEntity;
private InternalComponent internalComponent;
private List<InternalComponent> internalComponents;
public String getComponentNote() {
return componentNote;
}
public void setComponentNote(String componentNote) {
this.componentNote = componentNote;
}
public List<ManyToManyEntity> getManyToManyList() {
return manyToManyList;
}
public void setManyToManyList(List<ManyToManyEntity> manyToManyList) {
this.manyToManyList = manyToManyList;
}
public OneToOneEntity getOneToOneEntity() {
return oneToOneEntity;
}
public void setOneToOneEntity(OneToOneEntity oneToOneEntity) {
this.oneToOneEntity = oneToOneEntity;
}
public ManyToOneEntity getManyToOneEntity() {
return manyToOneEntity;
}
public void setManyToOneEntity(ManyToOneEntity manyToOneEntity) {
this.manyToOneEntity = manyToOneEntity;
}
public InternalComponent getInternalComponent() {
return internalComponent;
}
public void setInternalComponent(InternalComponent internalComponent) {
this.internalComponent = internalComponent;
}
public List<InternalComponent> getInternalComponents() {
return internalComponents;
}
public void setInternalComponents(List<InternalComponent> internalComponents) {
this.internalComponents = internalComponents;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !( o instanceof PlainComponent ) ) {
return false;
}
PlainComponent that = (PlainComponent) o;
if ( componentNote != null ? !componentNote.equals( that.componentNote ) : that.componentNote != null ) {
return false;
}
if ( internalComponent != null ? !internalComponent.equals( that.internalComponent ) : that.internalComponent != null ) {
return false;
}
if ( internalComponents != null ? !internalComponents.equals( that.internalComponents ) : that.internalComponents != null ) {
return false;
}
if ( manyToManyList != null ? !manyToManyList.equals( that.manyToManyList ) : that.manyToManyList != null ) {
return false;
}
if ( manyToOneEntity != null ? !manyToOneEntity.equals( that.manyToOneEntity ) : that.manyToOneEntity != null ) {
return false;
}
if ( oneToOneEntity != null ? !oneToOneEntity.equals( that.oneToOneEntity ) : that.oneToOneEntity != null ) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = componentNote != null ? componentNote.hashCode() : 0;
result = 31 * result + ( manyToManyList != null ? manyToManyList.hashCode() : 0 );
result = 31 * result + ( oneToOneEntity != null ? oneToOneEntity.hashCode() : 0 );
result = 31 * result + ( manyToOneEntity != null ? manyToOneEntity.hashCode() : 0 );
result = 31 * result + ( internalComponent != null ? internalComponent.hashCode() : 0 );
result = 31 * result + ( internalComponents != null ? internalComponents.hashCode() : 0 );
return result;
}
}
| PlainComponent |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExtensionRegistrationViaParametersAndFieldsTests.java | {
"start": 4380,
"end": 10815
} | class ____ extends AbstractJupiterTestEngineTests {
@Test
void constructorParameter() {
assertOneTestSucceeded(ConstructorParameterTestCase.class);
}
@Test
void constructorParameterForNestedTestClass() {
assertTestsSucceeded(NestedConstructorParameterTestCase.class, 2);
}
@Test
void beforeAllMethodParameter() {
assertOneTestSucceeded(BeforeAllParameterTestCase.class);
}
@Test
void afterAllMethodParameter() {
assertOneTestSucceeded(AfterAllParameterTestCase.class);
}
@Test
void beforeEachMethodParameter() {
assertOneTestSucceeded(BeforeEachParameterTestCase.class);
}
@Test
void afterEachMethodParameter() {
assertOneTestSucceeded(AfterEachParameterTestCase.class);
}
@Test
void testMethodParameter() {
assertOneTestSucceeded(TestMethodParameterTestCase.class);
}
@Test
void testFactoryMethodParameter() {
assertTestsSucceeded(TestFactoryMethodParameterTestCase.class, 2);
}
@Test
void testTemplateMethodParameter() {
assertTestsSucceeded(TestTemplateMethodParameterTestCase.class, 2);
}
@Test
void multipleRegistrationsViaParameter(@TrackLogRecords LogRecordListener listener) {
assertOneTestSucceeded(MultipleRegistrationsViaParameterTestCase.class);
assertThat(getRegisteredLocalExtensions(listener)).containsExactly("LongParameterResolver", "DummyExtension");
}
@Test
void staticField() {
assertOneTestSucceeded(StaticFieldTestCase.class);
}
@Test
void instanceField() {
assertOneTestSucceeded(InstanceFieldTestCase.class);
}
@Test
void fieldsWithTestInstancePerClass() {
assertOneTestSucceeded(TestInstancePerClassFieldTestCase.class);
}
@ParameterizedTest
@ValueSource(classes = { MultipleMixedRegistrationsViaFieldTestCase.class,
MultipleExtendWithRegistrationsViaFieldTestCase.class })
void multipleRegistrationsViaField(Class<?> testClass, @TrackLogRecords LogRecordListener listener) {
assertOneTestSucceeded(testClass);
assertThat(getRegisteredLocalExtensions(listener)).containsExactly("LongParameterResolver", "DummyExtension");
}
@Test
void duplicateRegistrationViaField() {
Class<?> testClass = DuplicateRegistrationViaFieldTestCase.class;
String expectedMessage = "Failed to register extension via field "
+ "[org.junit.jupiter.api.extension.Extension "
+ "org.junit.jupiter.engine.extension.ExtensionRegistrationViaParametersAndFieldsTests$DuplicateRegistrationViaFieldTestCase.dummy]. "
+ "The field registers an extension of type [org.junit.jupiter.engine.extension.DummyExtension] "
+ "via @RegisterExtension and @ExtendWith, but only one registration of a given extension type is permitted.";
executeTestsForClass(testClass).testEvents().assertThatEvents().haveExactly(1,
finishedWithFailure(instanceOf(PreconditionViolationException.class), message(expectedMessage)));
}
@ParameterizedTest(name = "{0}")
@ValueSource(classes = { AllInOneWithTestInstancePerMethodTestCase.class,
AllInOneWithTestInstancePerClassTestCase.class })
void registrationOrder(Class<?> testClass, @TrackLogRecords LogRecordListener listener) {
assertOneTestSucceeded(testClass);
assertThat(getRegisteredLocalExtensions(listener))//
.containsExactly(//
"ClassLevelExtension2", // @RegisterExtension on static field
"StaticField2", // @ExtendWith on static field
"ClassLevelExtension1", // @RegisterExtension on static field
"StaticField1", // @ExtendWith on static field
"ConstructorParameter", // @ExtendWith on parameter in constructor
"BeforeAllParameter", // @ExtendWith on parameter in static @BeforeAll method
"BeforeEachParameter", // @ExtendWith on parameter in @BeforeEach method
"AfterEachParameter", // @ExtendWith on parameter in @AfterEach method
"AfterAllParameter", // @ExtendWith on parameter in static @AfterAll method
"InstanceLevelExtension1", // @RegisterExtension on instance field
"InstanceField1", // @ExtendWith on instance field
"InstanceLevelExtension2", // @RegisterExtension on instance field
"InstanceField2", // @ExtendWith on instance field
"TestParameter" // @ExtendWith on parameter in @Test method
);
}
@Test
void registersProgrammaticTestInstancePostProcessors() {
assertOneTestSucceeded(ProgrammaticTestInstancePostProcessorTestCase.class);
}
@ParameterizedTest
@EnumSource(ParallelExecutorServiceType.class)
void createsExtensionPerInstance(
ParallelHierarchicalTestExecutorServiceFactory.ParallelExecutorServiceType executorServiceType) {
var results = executeTests(request() //
.selectors(selectClass(InitializationPerInstanceTestCase.class)) //
.configurationParameter(Constants.PARALLEL_EXECUTION_ENABLED_PROPERTY_NAME, "true") //
.configurationParameter(Constants.PARALLEL_CONFIG_EXECUTOR_SERVICE_PROPERTY_NAME,
executorServiceType.name()) //
);
assertTestsSucceeded(results, 100);
}
private List<String> getRegisteredLocalExtensions(LogRecordListener listener) {
return listener.stream(MutableExtensionRegistry.class, Level.FINER) //
.map(LogRecord::getMessage) //
.filter(message -> message.contains("local extension")) //
.map(message -> {
message = message.replaceAll(" from source .+", "");
int beginIndex = message.lastIndexOf('.') + 1;
if (message.contains("late-init")) {
return message.substring(beginIndex, message.indexOf("]"));
}
else {
int indexOfDollarSign = message.indexOf("$");
int indexOfAtSign = message.indexOf("@");
int endIndex = (indexOfDollarSign > 1 ? indexOfDollarSign : indexOfAtSign);
return message.substring(beginIndex, endIndex);
}
}) //
.toList();
}
private void assertOneTestSucceeded(Class<?> testClass) {
assertTestsSucceeded(testClass, 1);
}
private void assertTestsSucceeded(Class<?> testClass, int expected) {
assertTestsSucceeded(executeTestsForClass(testClass), expected);
}
private static void assertTestsSucceeded(EngineExecutionResults results, int expected) {
results.testEvents().assertStatistics(
stats -> stats.started(expected).succeeded(expected).skipped(0).aborted(0).failed(0));
}
// -------------------------------------------------------------------
/**
* The {@link MagicParameter.Extension} is first registered for the constructor
* and then used for lifecycle and test methods.
*/
@ExtendWith(LongParameterResolver.class)
static | ExtensionRegistrationViaParametersAndFieldsTests |
java | apache__flink | flink-metrics/flink-metrics-dropwizard/src/test/java/org/apache/flink/dropwizard/metrics/FlinkMeterWrapperTest.java | {
"start": 1158,
"end": 2124
} | class ____ {
private static final double DELTA = 0.0001;
@Test
void testWrapper() {
Meter meter = new TestMeter();
FlinkMeterWrapper wrapper = new FlinkMeterWrapper(meter);
assertThat(wrapper.getMeanRate()).isEqualTo(0);
assertThat(wrapper.getOneMinuteRate()).isEqualTo(5);
assertThat(wrapper.getFiveMinuteRate()).isEqualTo(0);
assertThat(wrapper.getFifteenMinuteRate()).isEqualTo(0);
assertThat(wrapper.getCount()).isEqualTo(100L);
}
@Test
void testMarkOneEvent() {
Meter meter = mock(Meter.class);
FlinkMeterWrapper wrapper = new FlinkMeterWrapper(meter);
wrapper.mark();
verify(meter).markEvent();
}
@Test
void testMarkSeveralEvents() {
Meter meter = mock(Meter.class);
FlinkMeterWrapper wrapper = new FlinkMeterWrapper(meter);
wrapper.mark(5);
verify(meter).markEvent(5);
}
}
| FlinkMeterWrapperTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1500/Issue1570_private.java | {
"start": 199,
"end": 1801
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
Model model = new Model();
assertEquals("{}", JSON.toJSONString(model, SerializerFeature.WriteNullBooleanAsFalse));
assertEquals("{\"value\":\"\"}", JSON.toJSONString(model, SerializerFeature.WriteNullStringAsEmpty));
}
public void test_for_issue_int() throws Exception {
ModelInt model = new ModelInt();
assertEquals("{}", JSON.toJSONString(model, SerializerFeature.WriteNullBooleanAsFalse));
assertEquals("{\"value\":0}", JSON.toJSONString(model, SerializerFeature.WriteNullNumberAsZero));
}
public void test_for_issue_long() throws Exception {
ModelLong model = new ModelLong();
assertEquals("{}", JSON.toJSONString(model, SerializerFeature.WriteNullBooleanAsFalse));
assertEquals("{\"value\":0}", JSON.toJSONString(model, SerializerFeature.WriteNullNumberAsZero));
}
public void test_for_issue_bool() throws Exception {
ModelBool model = new ModelBool();
assertEquals("{}", JSON.toJSONString(model, SerializerFeature.WriteNullNumberAsZero));
assertEquals("{\"value\":false}", JSON.toJSONString(model, SerializerFeature.WriteNullBooleanAsFalse));
}
public void test_for_issue_list() throws Exception {
ModelList model = new ModelList();
assertEquals("{}", JSON.toJSONString(model, SerializerFeature.WriteNullNumberAsZero));
assertEquals("{\"value\":[]}", JSON.toJSONString(model, SerializerFeature.WriteNullListAsEmpty));
}
private static | Issue1570_private |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java | {
"start": 1217,
"end": 1625
} | class ____ extends ActionType<GetCheckpointNodeAction.Response> {
public static final GetCheckpointNodeAction INSTANCE = new GetCheckpointNodeAction();
// note: this is an index action and requires `view_index_metadata`
public static final String NAME = GetCheckpointAction.NAME + "[n]";
private GetCheckpointNodeAction() {
super(NAME);
}
public static | GetCheckpointNodeAction |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/db2/DB2SelectTest_20.java | {
"start": 1037,
"end": 2741
} | class ____ extends DB2Test {
public void test_0() throws Exception {
String sql = "SELECT EMPNO.PREVVAL FROM SYSIBM.SYSDUMMY1";
DB2StatementParser parser = new DB2StatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
DB2SchemaStatVisitor visitor = new DB2SchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("SYSIBM.SYSDUMMY1")));
// assertTrue(visitor.getColumns().contains(new Column("DSN8B10.EMP", "WORKDEPT")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "first_name")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "full_name")));
assertEquals("SELECT EMPNO.PREVVAL"
+ "\nFROM SYSIBM.SYSDUMMY1", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2));
assertEquals("select EMPNO.prevval"
+ "\nfrom SYSIBM.SYSDUMMY1", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
}
}
| DB2SelectTest_20 |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unproxyable/FinalMethodRemoveFlagTest.java | {
"start": 930,
"end": 1162
} | class ____ extends MyParent {
private String foo;
final String ping() {
return foo;
}
@PostConstruct
void init() {
foo = "ok";
}
}
public static | MyBean |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/remote/FuzzyWatchSyncNotifyCallback.java | {
"start": 1166,
"end": 4052
} | class ____ extends AbstractPushCallBack {
/**
* The RpcPushTask associated with the callback.
*/
FuzzyWatchSyncNotifyTask fuzzyWatchSyncNotifyTask;
/**
* Constructs a new RpcPushCallback with the specified parameters.
*
* @param fuzzyWatchSyncNotifyTask The RpcPushTask associated with the callback
*/
public FuzzyWatchSyncNotifyCallback(FuzzyWatchSyncNotifyTask fuzzyWatchSyncNotifyTask) {
super(3000L);
this.fuzzyWatchSyncNotifyTask = fuzzyWatchSyncNotifyTask;
}
/**
* Handles the successful completion of the RPC push operation.
*/
@Override
public void onSuccess() {
// Check TPS limits
TpsCheckRequest tpsCheckRequest = new TpsCheckRequest();
tpsCheckRequest.setPointName(FuzzyWatchSyncNotifyTask.CONFIG_FUZZY_WATCH_CONFIG_SYNC_SUCCESS);
ControlManagerCenter.getInstance().getTpsControlManager().check(tpsCheckRequest);
if (fuzzyWatchSyncNotifyTask.batchTaskCounter != null) {
fuzzyWatchSyncNotifyTask.batchTaskCounter.batchSuccess(
fuzzyWatchSyncNotifyTask.notifyRequest.getCurrentBatch());
if (fuzzyWatchSyncNotifyTask.batchTaskCounter.batchCompleted()
&& fuzzyWatchSyncNotifyTask.notifyRequest.getSyncType().equals(FUZZY_WATCH_INIT_NOTIFY)) {
ConfigFuzzyWatchSyncRequest request = ConfigFuzzyWatchSyncRequest.buildInitFinishRequest(
fuzzyWatchSyncNotifyTask.notifyRequest.getGroupKeyPattern());
// Create RPC push task and push the request to the client
FuzzyWatchSyncNotifyTask fuzzyWatchSyncNotifyTaskFinish = new FuzzyWatchSyncNotifyTask(
fuzzyWatchSyncNotifyTask.connectionManager, fuzzyWatchSyncNotifyTask.rpcPushService, request,
null, fuzzyWatchSyncNotifyTask.maxRetryTimes, fuzzyWatchSyncNotifyTask.connectionId);
fuzzyWatchSyncNotifyTaskFinish.scheduleSelf();
}
}
}
/**
* Handles the failure of the RPC push operation.
*
* @param e The exception thrown during the operation
*/
@Override
public void onFail(Throwable e) {
// Check TPS limits
TpsCheckRequest tpsCheckRequest = new TpsCheckRequest();
tpsCheckRequest.setPointName(FuzzyWatchSyncNotifyTask.CONFIG_FUZZY_WATCH_CONFIG_SYNC_FAIL);
ControlManagerCenter.getInstance().getTpsControlManager().check(tpsCheckRequest);
// Log the failure and retry the task
Loggers.REMOTE_PUSH.warn("Push fail, groupKeyPattern={}, clientId={}",
fuzzyWatchSyncNotifyTask.notifyRequest.getGroupKeyPattern(), fuzzyWatchSyncNotifyTask.connectionId, e);
fuzzyWatchSyncNotifyTask.scheduleSelf();
}
}
| FuzzyWatchSyncNotifyCallback |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/mixins/TestMixinMerging.java | {
"start": 581,
"end": 710
} | class ____ implements Contact {
@Override
public String getCity() { return "Seattle"; }
}
static | ContactImpl |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/StoredFieldIndexFieldData.java | {
"start": 1357,
"end": 3429
} | class ____<T> implements IndexFieldData<StoredFieldIndexFieldData<T>.StoredFieldLeafFieldData> {
private final String fieldName;
private final ValuesSourceType valuesSourceType;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
protected final StoredFieldLoader loader;
protected StoredFieldIndexFieldData(String fieldName, ValuesSourceType valuesSourceType, ToScriptFieldFactory<T> toScriptFieldFactory) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.toScriptFieldFactory = toScriptFieldFactory;
this.loader = StoredFieldLoader.create(false, Set.of(fieldName));
}
@Override
public String getFieldName() {
return fieldName;
}
@Override
public ValuesSourceType getValuesSourceType() {
return valuesSourceType;
}
@Override
public final StoredFieldLeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
@Override
public final StoredFieldLeafFieldData loadDirect(LeafReaderContext context) throws IOException {
return new StoredFieldLeafFieldData(loader.getLoader(context, null));
}
protected abstract T loadLeaf(LeafStoredFieldLoader leafStoredFieldLoader);
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
throw new IllegalArgumentException("not supported for stored field fallback");
}
@Override
public BucketedSort newBucketedSort(
BigArrays bigArrays,
Object missingValue,
MultiValueMode sortMode,
XFieldComparatorSource.Nested nested,
SortOrder sortOrder,
DocValueFormat format,
int bucketSize,
BucketedSort.ExtraData extra
) {
throw new IllegalArgumentException("not supported for stored field fallback");
}
public | StoredFieldIndexFieldData |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/model/Page.java | {
"start": 798,
"end": 1900
} | class ____<E> implements Serializable {
static final long serialVersionUID = 1234544030560484292L;
/**
* totalCount.
*/
private int totalCount;
/**
* pageNumber.
*/
private int pageNumber;
/**
* pagesAvailable.
*/
private int pagesAvailable;
/**
* pageItems.
*/
private List<E> pageItems = new ArrayList<>();
public void setPageNumber(int pageNumber) {
this.pageNumber = pageNumber;
}
public void setPagesAvailable(int pagesAvailable) {
this.pagesAvailable = pagesAvailable;
}
public void setPageItems(List<E> pageItems) {
this.pageItems = pageItems;
}
public int getTotalCount() {
return totalCount;
}
public void setTotalCount(int totalCount) {
this.totalCount = totalCount;
}
public int getPageNumber() {
return pageNumber;
}
public int getPagesAvailable() {
return pagesAvailable;
}
public List<E> getPageItems() {
return pageItems;
}
}
| Page |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/testers/MapIsEmptyTester.java | {
"start": 1413,
"end": 1765
} | class ____<K, V> extends AbstractMapTester<K, V> {
@CollectionSize.Require(ZERO)
public void testIsEmpty_yes() {
assertTrue("isEmpty() should return true", getMap().isEmpty());
}
@CollectionSize.Require(absent = ZERO)
public void testIsEmpty_no() {
assertFalse("isEmpty() should return false", getMap().isEmpty());
}
}
| MapIsEmptyTester |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InfiniteRecursionTest.java | {
"start": 7434,
"end": 7824
} | class ____ {
void f() {
new Object() {
void g() {
f();
}
};
}
}
""")
.doTest();
}
@Test
public void positiveAfterNestedClass() {
compilationHelper
.addSourceLines(
"Test.java",
"""
final | Test |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dml/SqlCompileAndExecutePlan.java | {
"start": 1507,
"end": 3302
} | class ____ extends SqlCall {
public static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("COMPILE AND EXECUTE PLAN", SqlKind.OTHER);
private final SqlNode planFile;
private SqlNode operand;
public SqlCompileAndExecutePlan(SqlParserPos pos, SqlNode planFile, SqlNode operand) {
super(pos);
this.planFile = planFile;
this.operand = checkOperand(operand);
}
public String getPlanFile() {
return SqlParseUtils.extractString(planFile);
}
@Nonnull
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
@Nonnull
@Override
public List<SqlNode> getOperandList() {
return Collections.singletonList(operand);
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("COMPILE");
writer.keyword("AND");
writer.keyword("EXECUTE");
writer.keyword("PLAN");
planFile.unparse(writer, leftPrec, rightPrec);
writer.keyword("FOR");
operand.unparse(writer, leftPrec, rightPrec);
}
@Override
public void setOperand(int i, SqlNode operand) {
if (i == 0) {
this.operand = checkOperand(operand);
} else {
throw new UnsupportedOperationException(
"SqlCompileAndExecutePlan supports only one operand with index 0");
}
}
private SqlNode checkOperand(SqlNode operand) {
if (!(operand instanceof RichSqlInsert || operand instanceof SqlStatementSet)) {
throw new UnsupportedOperationException(
"SqlCompileAndExecutePlan supports only RichSqlInsert or SqlStatementSet as operand");
}
return operand;
}
}
| SqlCompileAndExecutePlan |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/DoubleJavaType.java | {
"start": 639,
"end": 4962
} | class ____ extends AbstractClassJavaType<Double> implements
PrimitiveJavaType<Double> {
public static final DoubleJavaType INSTANCE = new DoubleJavaType();
public DoubleJavaType() {
super( Double.class );
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators indicators) {
return DoubleJdbcType.INSTANCE;
}
@Override
public boolean useObjectEqualsHashCode() {
return true;
}
@Override
public String toString(Double value) {
return value == null ? null : value.toString();
}
@Override
public Double fromString(CharSequence string) {
return Double.valueOf( string.toString() );
}
@Override
public boolean isInstance(Object value) {
return value instanceof Double;
}
@SuppressWarnings("unchecked")
@Override
public <X> X unwrap(Double value, Class<X> type, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( Double.class.isAssignableFrom( type ) || type == Object.class ) {
return (X) value;
}
if ( Float.class.isAssignableFrom( type ) ) {
return (X) Float.valueOf( value.floatValue() );
}
if ( Byte.class.isAssignableFrom( type ) ) {
return (X) Byte.valueOf( value.byteValue() );
}
if ( Short.class.isAssignableFrom( type ) ) {
return (X) Short.valueOf( value.shortValue() );
}
if ( Integer.class.isAssignableFrom( type ) ) {
return (X) Integer.valueOf( value.intValue() );
}
if ( Long.class.isAssignableFrom( type ) ) {
return (X) Long.valueOf( value.longValue() );
}
if ( BigInteger.class.isAssignableFrom( type ) ) {
return (X) BigInteger.valueOf( value.longValue() );
}
if ( BigDecimal.class.isAssignableFrom( type ) ) {
return (X) BigDecimal.valueOf( value );
}
if ( String.class.isAssignableFrom( type ) ) {
return (X) value.toString();
}
throw unknownUnwrap( type );
}
@Override
public <X> Double wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( value instanceof Double doubleValue ) {
return doubleValue;
}
if ( value instanceof Number number ) {
return number.doubleValue();
}
else if ( value instanceof String string ) {
return Double.valueOf( string );
}
throw unknownWrap( value.getClass() );
}
@Override
public boolean isWider(JavaType<?> javaType) {
return switch ( javaType.getTypeName() ) {
case
"byte", "java.lang.Byte",
"short", "java.lang.Short",
"int", "java.lang.Integer",
"long", "java.lang.Long",
"float", "java.lang.Float",
"java.math.BigInteger",
"java.math.BigDecimal" -> true;
default -> false;
};
}
@Override
public Class<?> getPrimitiveClass() {
return double.class;
}
@Override
public Class<Double[]> getArrayClass() {
return Double[].class;
}
@Override
public Class<?> getPrimitiveArrayClass() {
return double[].class;
}
@Override
public Double getDefaultValue() {
return 0.0;
}
@Override
public long getDefaultSqlLength(Dialect dialect, JdbcType jdbcType) {
//this is the number of decimal digits
// + sign + decimal point
// + space for "E+nnn"
return 1+17+1+5;
}
@Override
public int getDefaultSqlPrecision(Dialect dialect, JdbcType jdbcType) {
return jdbcType.isFloat()
// this is usually the number of *binary* digits
// in a double-precision FP number
? dialect.getDoublePrecision()
// this is the number of decimal digits in a Java double
: 17;
}
@Override
public <X> Double coerce(X value, CoercionContext coercionContext) {
if ( value == null ) {
return null;
}
if ( value instanceof Double doubleValue ) {
return doubleValue;
}
if ( value instanceof Float floatValue ) {
return CoercionHelper.toDouble( floatValue );
}
if ( value instanceof BigInteger bigInteger ) {
return CoercionHelper.toDouble( bigInteger );
}
if ( value instanceof BigDecimal bigDecimal ) {
return CoercionHelper.toDouble( bigDecimal );
}
if ( value instanceof Number number ) {
return number.doubleValue();
}
if ( value instanceof String string ) {
return CoercionHelper.coerceWrappingError(
() -> Double.parseDouble( string )
);
}
throw new CoercionException(
String.format(
Locale.ROOT,
"Cannot coerce value '%s' [%s] to Double",
value,
value.getClass().getName()
)
);
}
}
| DoubleJavaType |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/metrics/DataSourcePoolMetricsAutoConfigurationTests.java | {
"start": 14201,
"end": 14328
} | class ____ {
@Bean
static HikariSealer hikariSealer() {
return new HikariSealer();
}
static | HikariSealingConfiguration |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RxReturnValueIgnoredTest.java | {
"start": 13083,
"end": 13552
} | class ____ {
Single getSingle() {
return null;
}
void f() {
// BUG: Diagnostic contains: Rx objects must be checked.
getSingle();
}
}
""")
.doTest();
}
@Test
public void rx1Completable() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import rx.Completable;
| Test |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/origin/TextResourceOriginTests.java | {
"start": 3714,
"end": 4340
} | class ____ resource [foo.txt]");
}
@Test
void toStringWhenResourceIsClasspathResourceReturnsToStringWithJar() {
ClassPathResource resource = new ClassPathResource("foo.txt") {
@Override
public URI getURI() throws IOException {
try {
return new URI("jar:file:/home/user/project/target/project-0.0.1-SNAPSHOT.jar"
+ "!/BOOT-INF/classes!/foo.txt");
}
catch (URISyntaxException ex) {
throw new IllegalStateException(ex);
}
}
};
Location location = new Location(1, 2);
TextResourceOrigin origin = new TextResourceOrigin(resource, location);
assertThat(origin).hasToString(" | path |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/service/dump/DumpChangeGrayConfigWorker.java | {
"start": 1668,
"end": 8415
} | class ____ implements Runnable {
Timestamp startTime;
ConfigInfoGrayPersistService configInfoGrayPersistService;
ConfigMigrateService configMigrateService;
private final HistoryConfigInfoPersistService historyConfigInfoPersistService;
int pageSize = 100;
public DumpChangeGrayConfigWorker(ConfigInfoGrayPersistService configInfoGrayPersistService, Timestamp startTime,
HistoryConfigInfoPersistService historyConfigInfoPersistService,
ConfigMigrateService configMigrateService) {
this.configInfoGrayPersistService = configInfoGrayPersistService;
this.startTime = startTime;
this.historyConfigInfoPersistService = historyConfigInfoPersistService;
this.configMigrateService = configMigrateService;
}
@Override
@SuppressWarnings("PMD.MethodTooLongRule")
public void run() {
try {
if (!PropertyUtil.isDumpChangeOn()) {
LogUtil.DEFAULT_LOG.info("DumpGrayChange task is not open");
return;
}
Timestamp currentTime = new Timestamp(System.currentTimeMillis());
LogUtil.DEFAULT_LOG.info("DumpGrayChange start ,from time {},current time {}", startTime, currentTime);
LogUtil.DEFAULT_LOG.info("Start to check delete configs from time {}", startTime);
long startDeletedConfigTime = System.currentTimeMillis();
long deleteCursorId = 0L;
while (true) {
List<ConfigInfoStateWrapper> configDeleted = historyConfigInfoPersistService.findDeletedConfig(startTime,
deleteCursorId, pageSize, Constants.GRAY);
for (ConfigInfoStateWrapper configInfo : configDeleted) {
String grayName = configInfo.getGrayName();
if (StringUtils.isBlank(grayName)) {
continue;
}
ConfigInfoStateWrapper configInfoStateWrapper = configInfoGrayPersistService.findConfigInfo4GrayState(configInfo.getDataId(),
configInfo.getGroup(), configInfo.getTenant(), grayName);
if (configInfoStateWrapper == null) {
ConfigCacheService.removeGray(configInfo.getDataId(), configInfo.getGroup(),
configInfo.getTenant(), grayName);
LogUtil.DEFAULT_LOG.info("[dump-gray-delete-ok], groupKey: {}, tenant: {}, grayName: {}",
GroupKey2.getKey(configInfo.getDataId(), configInfo.getGroup()), configInfo.getTenant(), grayName);
configMigrateService.checkDeletedConfigGrayMigrateState(configInfoStateWrapper);
}
}
if (configDeleted.size() < pageSize) {
break;
}
deleteCursorId = configDeleted.get(configDeleted.size() - 1).getId();
}
LogUtil.DEFAULT_LOG.info("Check delete configs finished,cost:{}",
System.currentTimeMillis() - startDeletedConfigTime);
LogUtil.DEFAULT_LOG.info("Check changeGrayConfig start");
long startChangeConfigTime = System.currentTimeMillis();
long changeCursorId = 0L;
while (true) {
LogUtil.DEFAULT_LOG.info("Check changed gray configs from time {},lastMaxId={}", startTime,
changeCursorId);
List<ConfigInfoGrayWrapper> changeConfigs = configInfoGrayPersistService.findChangeConfig(startTime,
changeCursorId, pageSize);
for (ConfigInfoGrayWrapper cf : changeConfigs) {
configMigrateService.checkChangedConfigGrayMigrateState(cf);
if (StringUtils.isBlank(cf.getTenant())) {
continue;
}
final String groupKey = GroupKey2.getKey(cf.getDataId(), cf.getGroup(), cf.getTenant());
//check md5 & localtimestamp update local disk cache.
boolean newLastModified = cf.getLastModified() > ConfigCacheService.getLastModifiedTs(groupKey);
String localContentMd5 = ConfigCacheService.getContentMd5(groupKey);
boolean md5Update = !localContentMd5.equals(cf.getMd5());
if (newLastModified || md5Update) {
LogUtil.DEFAULT_LOG.info("[dump-change-gray] find change config {}, {}, md5={}",
new Object[] {groupKey, cf.getLastModified(), cf.getMd5()});
LogUtil.DUMP_LOG.info("[dump-change-gray] find change config {}, {}, md5={}",
new Object[] {groupKey, cf.getLastModified(), cf.getMd5()});
ConfigCacheService.dumpGray(cf.getDataId(), cf.getGroup(), cf.getTenant(), cf.getGrayName(),
cf.getGrayRule(), cf.getContent(), cf.getLastModified(), cf.getEncryptedDataKey());
final String content = cf.getContent();
final String md5 = MD5Utils.md5Hex(content, Constants.ENCODE_GBK);
final String md5Utf8 = MD5Utils.md5Hex(content, Constants.ENCODE_UTF8);
LogUtil.DEFAULT_LOG.info("[dump-change-gray-ok] {}, {}, length={}, md5={},md5UTF8={}",
new Object[] {groupKey, cf.getLastModified(), content.length(), md5, md5Utf8});
}
}
if (changeConfigs.size() < pageSize) {
break;
}
changeCursorId = changeConfigs.get(changeConfigs.size() - 1).getId();
}
long endChangeConfigTime = System.currentTimeMillis();
LogUtil.DEFAULT_LOG.info(
"Check changed gray configs finished,cost:{}, next task running will from start time {}",
endChangeConfigTime - startChangeConfigTime, currentTime);
startTime = currentTime;
} catch (Throwable e) {
LogUtil.DEFAULT_LOG.error("Check changed gray configs error", e);
} finally {
ConfigExecutor.scheduleConfigChangeTask(this, PropertyUtil.getDumpChangeWorkerInterval(),
TimeUnit.MILLISECONDS);
LogUtil.DEFAULT_LOG.info("Next dump gray change will scheduled after {} milliseconds",
PropertyUtil.getDumpChangeWorkerInterval());
}
}
}
| DumpChangeGrayConfigWorker |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/ReusingBuildFirstReOpenableHashJoinIterator.java | {
"start": 1454,
"end": 4182
} | class ____<V1, V2, O>
extends ReusingBuildFirstHashJoinIterator<V1, V2, O> {
private final ReOpenableMutableHashTable<V1, V2> reopenHashTable;
public ReusingBuildFirstReOpenableHashJoinIterator(
MutableObjectIterator<V1> firstInput,
MutableObjectIterator<V2> secondInput,
TypeSerializer<V1> serializer1,
TypeComparator<V1> comparator1,
TypeSerializer<V2> serializer2,
TypeComparator<V2> comparator2,
TypePairComparator<V2, V1> pairComparator,
MemoryManager memManager,
IOManager ioManager,
AbstractInvokable ownerTask,
double memoryFraction,
boolean probeSideOuterJoin,
boolean buildSideOuterJoin,
boolean useBitmapFilters)
throws MemoryAllocationException {
super(
firstInput,
secondInput,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparator,
memManager,
ioManager,
ownerTask,
memoryFraction,
probeSideOuterJoin,
buildSideOuterJoin,
useBitmapFilters);
reopenHashTable = (ReOpenableMutableHashTable<V1, V2>) hashJoin;
}
@Override
public <BT, PT> MutableHashTable<BT, PT> getHashJoin(
TypeSerializer<BT> buildSideSerializer,
TypeComparator<BT> buildSideComparator,
TypeSerializer<PT> probeSideSerializer,
TypeComparator<PT> probeSideComparator,
TypePairComparator<PT, BT> pairComparator,
MemoryManager memManager,
IOManager ioManager,
AbstractInvokable ownerTask,
double memoryFraction,
boolean useBitmapFilters)
throws MemoryAllocationException {
final int numPages = memManager.computeNumberOfPages(memoryFraction);
final List<MemorySegment> memorySegments = memManager.allocatePages(ownerTask, numPages);
return new ReOpenableMutableHashTable<BT, PT>(
buildSideSerializer,
probeSideSerializer,
buildSideComparator,
probeSideComparator,
pairComparator,
memorySegments,
ioManager,
useBitmapFilters);
}
/**
* Set new input for probe side
*
* @throws IOException
*/
public void reopenProbe(MutableObjectIterator<V2> probeInput) throws IOException {
reopenHashTable.reopenProbe(probeInput);
}
}
| ReusingBuildFirstReOpenableHashJoinIterator |
java | apache__camel | components/camel-rss/src/main/java/org/apache/camel/component/rss/RssConstants.java | {
"start": 922,
"end": 1295
} | class ____ {
/**
* Header key for the {@link com.rometools.rome.feed.synd.SyndFeed} object is stored on the in message on the
* exchange.
*/
@Metadata(description = "The entire `SyncFeed` object.", javaType = "Object")
public static final String RSS_FEED = "CamelRssFeed";
private RssConstants() {
// Utility class
}
}
| RssConstants |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/spi/ReflectionOptimizer.java | {
"start": 765,
"end": 1120
} | interface ____ {
/**
* Get the name of all properties.
*/
String[] getPropertyNames();
/**
* Get the value of all properties from the given entity
*/
Object[] getPropertyValues(Object object);
/**
* Set all property values into an entity instance.
*/
void setPropertyValues(Object object, Object[] values);
}
}
| AccessOptimizer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/runtime/IpScriptFieldExistsQuery.java | {
"start": 647,
"end": 1293
} | class ____ extends AbstractIpScriptFieldQuery {
public IpScriptFieldExistsQuery(Script script, IpFieldScript.LeafFactory leafFactory, String fieldName) {
super(script, leafFactory, fieldName);
}
@Override
protected boolean matches(BytesRef[] values, int count) {
return count > 0;
}
@Override
public final String toString(String field) {
if (fieldName().contentEquals(field)) {
return getClass().getSimpleName();
}
return fieldName() + ":" + getClass().getSimpleName();
}
// Superclass's equals and hashCode are great for this class
}
| IpScriptFieldExistsQuery |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NullNeedsCastForVarargsTest.java | {
"start": 4076,
"end": 4577
} | class ____ {
void test() {
// BUG: Diagnostic contains:
assertThat(Stream.of("a")).containsExactly(null);
}
}
""")
.doTest();
}
@Test
public void containsAnyOf_bareNull() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
| Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/dirty/BasicLazyPropertyUpdateTest.java | {
"start": 3896,
"end": 4523
} | class ____ {
@Id
private Long id;
private String name;
@Basic(fetch = FetchType.LAZY)
private String lazyProperty;
public TestEntity() {
}
public TestEntity(Long id, String name, String lazyProperty) {
this.id = id;
this.name = name;
this.lazyProperty = lazyProperty;
}
public void setName(String name) {
this.name = name;
}
public void setLazyProperty(String lazyProperty) {
this.lazyProperty = lazyProperty;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
public String getLazyProperty() {
return lazyProperty;
}
}
}
| TestEntity |
java | apache__kafka | connect/api/src/main/java/org/apache/kafka/connect/data/Schema.java | {
"start": 2239,
"end": 7864
} | enum ____ {
/**
* 8-bit signed integer
* <p>
* Note that if you have an unsigned 8-bit data source, {@link Type#INT16} will be required to safely capture all valid values
*/
INT8,
/**
* 16-bit signed integer
* <p>
* Note that if you have an unsigned 16-bit data source, {@link Type#INT32} will be required to safely capture all valid values
*/
INT16,
/**
* 32-bit signed integer
* <p>
* Note that if you have an unsigned 32-bit data source, {@link Type#INT64} will be required to safely capture all valid values
*/
INT32,
/**
* 64-bit signed integer
* <p>
* Note that if you have an unsigned 64-bit data source, the {@link Decimal} logical type (encoded as {@link Type#BYTES})
* will be required to safely capture all valid values
*/
INT64,
/**
* 32-bit IEEE 754 floating point number
*/
FLOAT32,
/**
* 64-bit IEEE 754 floating point number
*/
FLOAT64,
/**
* Boolean value (true or false)
*/
BOOLEAN,
/**
* Character string that supports all Unicode characters.
* <p>
* Note that this does not imply any specific encoding (e.g. UTF-8) as this is an in-memory representation.
*/
STRING,
/**
* Sequence of unsigned 8-bit bytes
*/
BYTES,
/**
* An ordered sequence of elements, each of which shares the same type.
*/
ARRAY,
/**
* A mapping from keys to values. Both keys and values can be arbitrarily complex types, including complex types
* such as {@link Struct}.
*/
MAP,
/**
* A structured record containing a set of named fields, each field using a fixed, independent {@link Schema}.
*/
STRUCT;
private final String name;
Type() {
this.name = this.name().toLowerCase(Locale.ROOT);
}
public String getName() {
return name;
}
public boolean isPrimitive() {
return switch (this) {
case INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, STRING, BYTES -> true;
default -> false;
};
}
}
Schema INT8_SCHEMA = SchemaBuilder.int8().build();
Schema INT16_SCHEMA = SchemaBuilder.int16().build();
Schema INT32_SCHEMA = SchemaBuilder.int32().build();
Schema INT64_SCHEMA = SchemaBuilder.int64().build();
Schema FLOAT32_SCHEMA = SchemaBuilder.float32().build();
Schema FLOAT64_SCHEMA = SchemaBuilder.float64().build();
Schema BOOLEAN_SCHEMA = SchemaBuilder.bool().build();
Schema STRING_SCHEMA = SchemaBuilder.string().build();
Schema BYTES_SCHEMA = SchemaBuilder.bytes().build();
Schema OPTIONAL_INT8_SCHEMA = SchemaBuilder.int8().optional().build();
Schema OPTIONAL_INT16_SCHEMA = SchemaBuilder.int16().optional().build();
Schema OPTIONAL_INT32_SCHEMA = SchemaBuilder.int32().optional().build();
Schema OPTIONAL_INT64_SCHEMA = SchemaBuilder.int64().optional().build();
Schema OPTIONAL_FLOAT32_SCHEMA = SchemaBuilder.float32().optional().build();
Schema OPTIONAL_FLOAT64_SCHEMA = SchemaBuilder.float64().optional().build();
Schema OPTIONAL_BOOLEAN_SCHEMA = SchemaBuilder.bool().optional().build();
Schema OPTIONAL_STRING_SCHEMA = SchemaBuilder.string().optional().build();
Schema OPTIONAL_BYTES_SCHEMA = SchemaBuilder.bytes().optional().build();
/**
* @return the type of this schema
*/
Type type();
/**
* @return true if this field is optional, false otherwise
*/
boolean isOptional();
/**
* @return the default value for this schema
*/
Object defaultValue();
/**
* @return the name of this schema
*/
String name();
/**
* Get the optional version of the schema. If a version is included, newer versions <b>must</b> be larger than older ones.
* @return the version of this schema
*/
Integer version();
/**
* @return the documentation for this schema
*/
String doc();
/**
* Get a map of schema parameters.
* @return Map containing parameters for this schema, or null if there are no parameters
*/
Map<String, String> parameters();
/**
* Get the key schema for this map schema. Throws a {@link DataException} if this schema is not a map.
* @return the key schema
*/
Schema keySchema();
/**
* Get the value schema for this map or array schema. Throws a {@link DataException} if this schema is not a map or array.
* @return the value schema
*/
Schema valueSchema();
/**
* Get the list of Fields for this Schema. Throws a {@link DataException} if this schema is not a
* {@link Schema.Type#STRUCT}.
*
* @return the list of fields for this Schema
*/
List<Field> fields();
/**
* Get a {@link Field} for this Schema by name. Throws a {@link DataException} if this schema is not a
* {@link Schema.Type#STRUCT}.
*
* @param fieldName the name of the field to look up
* @return the Field object for the specified field, or null if there is no field with the given name
*/
Field field(String fieldName);
/**
* Return a concrete instance of the {@link Schema}
* @return the {@link Schema}
*/
Schema schema();
}
| Type |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/write/GeneratorCloseTest.java | {
"start": 926,
"end": 6407
} | class ____ extends JacksonCoreTestBase
{
/**
* This unit test checks the default behaviour; with no auto-close, no
* automatic closing should occur, nor explicit one unless specific
* forcing method is used.
*/
@Test
void noAutoCloseGenerator() throws Exception
{
JsonFactory f = new JsonFactory();
// Check the default settings
assertTrue(f.isEnabled(StreamWriteFeature.AUTO_CLOSE_TARGET));
// then change
f = f.rebuild().disable(StreamWriteFeature.AUTO_CLOSE_TARGET).build();
assertFalse(f.isEnabled(StreamWriteFeature.AUTO_CLOSE_TARGET));
@SuppressWarnings("resource")
StringWriterForTesting output = new StringWriterForTesting();
JsonGenerator g = f.createGenerator(ObjectWriteContext.empty(), output);
// shouldn't be closed to begin with...
assertFalse(output.isClosed());
g.writeNumber(39);
// regular close won't close it either:
g.close();
assertFalse(output.isClosed());
}
@Test
void closeGenerator() throws Exception
{
JsonFactory f = JsonFactory.builder()
.enable(StreamWriteFeature.AUTO_CLOSE_TARGET).build();
@SuppressWarnings("resource")
StringWriterForTesting output = new StringWriterForTesting();
JsonGenerator g = f.createGenerator(ObjectWriteContext.empty(), output);
// shouldn't be closed to begin with...
assertFalse(output.isClosed());
g.writeNumber(39);
// but close() should now close the writer
g.close();
assertTrue(output.isClosed());
}
@Test
void noAutoCloseOutputStream() throws Exception
{
JsonFactory f = JsonFactory.builder()
.disable(StreamWriteFeature.AUTO_CLOSE_TARGET).build();
@SuppressWarnings("resource")
ByteOutputStreamForTesting output = new ByteOutputStreamForTesting();
JsonGenerator g = f.createGenerator(ObjectWriteContext.empty(), output, JsonEncoding.UTF8);
assertFalse(output.isClosed());
g.writeNumber(39);
g.close();
assertFalse(output.isClosed());
}
@Test
void autoCloseArraysAndObjects()
throws Exception
{
JsonFactory f = new JsonFactory();
// let's verify default setting, first:
assertTrue(f.isEnabled(StreamWriteFeature.AUTO_CLOSE_CONTENT));
StringWriter sw = new StringWriter();
// First, test arrays:
JsonGenerator g = f.createGenerator(ObjectWriteContext.empty(), sw);
g.writeStartArray();
g.close();
assertEquals("[]", sw.toString());
// Then objects
sw = new StringWriter();
g = f.createGenerator(ObjectWriteContext.empty(), sw);
g.writeStartObject();
g.close();
assertEquals("{}", sw.toString());
}
@Test
void noAutoCloseArraysAndObjects()
throws Exception
{
JsonFactory f = JsonFactory.builder()
.disable(StreamWriteFeature.AUTO_CLOSE_CONTENT)
.build();
StringWriter sw = new StringWriter();
JsonGenerator g = f.createGenerator(ObjectWriteContext.empty(), sw);
g.writeStartArray();
g.close();
// shouldn't close
assertEquals("[", sw.toString());
// Then objects
sw = new StringWriter();
g = f.createGenerator(ObjectWriteContext.empty(), sw);
g.writeStartObject();
g.close();
assertEquals("{", sw.toString());
}
@SuppressWarnings("resource")
@Test
void autoFlushOrNot() throws Exception
{
JsonFactory f = new JsonFactory();
assertTrue(f.isEnabled(StreamWriteFeature.FLUSH_PASSED_TO_STREAM));
StringWriterForTesting sw = new StringWriterForTesting();
JsonGenerator g = f.createGenerator(ObjectWriteContext.empty(), sw);
g.writeStartArray();
g.writeEndArray();
assertEquals(0, sw.flushCount);
g.flush();
assertEquals(1, sw.flushCount);
g.close();
// ditto with stream
ByteOutputStreamForTesting bytes = new ByteOutputStreamForTesting();
g = f.createGenerator(ObjectWriteContext.empty(), bytes, JsonEncoding.UTF8);
g.writeStartArray();
g.writeEndArray();
assertEquals(0, bytes.flushCount);
g.flush();
assertEquals(1, bytes.flushCount);
assertEquals(2, bytes.toByteArray().length);
g.close();
// then disable and we should not see flushing again...
f = f.rebuild()
.disable(StreamWriteFeature.FLUSH_PASSED_TO_STREAM)
.build();
// first with a Writer
sw = new StringWriterForTesting();
g = f.createGenerator(ObjectWriteContext.empty(), sw);
g.writeStartArray();
g.writeEndArray();
assertEquals(0, sw.flushCount);
g.flush();
assertEquals(0, sw.flushCount);
g.close();
assertEquals("[]", sw.toString());
// and then with OutputStream
bytes = new ByteOutputStreamForTesting();
g = f.createGenerator(ObjectWriteContext.empty(), bytes, JsonEncoding.UTF8);
g.writeStartArray();
g.writeEndArray();
assertEquals(0, bytes.flushCount);
g.flush();
assertEquals(0, bytes.flushCount);
g.close();
assertEquals(2, bytes.toByteArray().length);
}
}
| GeneratorCloseTest |
java | apache__camel | components/camel-debezium/camel-debezium-oracle/src/generated/java/org/apache/camel/component/debezium/oracle/configuration/OracleConnectorEmbeddedDebeziumConfiguration.java | {
"start": 75738,
"end": 92677
} | class ____ should be used to store and
* recover database schema changes. The configuration properties for the
* history are prefixed with the 'schema.history.internal.' string.
*/
public void setSchemaHistoryInternal(String schemaHistoryInternal) {
this.schemaHistoryInternal = schemaHistoryInternal;
}
public String getSchemaHistoryInternal() {
return schemaHistoryInternal;
}
/**
* Regular expressions matching columns to exclude from change events
*/
public void setColumnExcludeList(String columnExcludeList) {
this.columnExcludeList = columnExcludeList;
}
public String getColumnExcludeList() {
return columnExcludeList;
}
/**
* The maximum number of milliseconds that a LogMiner session lives for
* before being restarted. Defaults to 0 (indefinite until a log switch
* occurs)
*/
public void setLogMiningSessionMaxMs(long logMiningSessionMaxMs) {
this.logMiningSessionMaxMs = logMiningSessionMaxMs;
}
public long getLogMiningSessionMaxMs() {
return logMiningSessionMaxMs;
}
/**
* Resolvable hostname or IP address of the database server.
*/
public void setDatabaseHostname(String databaseHostname) {
this.databaseHostname = databaseHostname;
}
public String getDatabaseHostname() {
return databaseHostname;
}
/**
* The minimum SCN interval size that this connector will try to read from
* redo/archive logs.
*/
public void setLogMiningBatchSizeMin(long logMiningBatchSizeMin) {
this.logMiningBatchSizeMin = logMiningBatchSizeMin;
}
public long getLogMiningBatchSizeMin() {
return logMiningBatchSizeMin;
}
/**
* The maximum time in milliseconds to wait for connection validation to
* complete. Defaults to 60 seconds.
*/
public void setConnectionValidationTimeoutMs(
long connectionValidationTimeoutMs) {
this.connectionValidationTimeoutMs = connectionValidationTimeoutMs;
}
public long getConnectionValidationTimeoutMs() {
return connectionValidationTimeoutMs;
}
@Override
protected Configuration createConnectorConfiguration() {
final Configuration.Builder configBuilder = Configuration.create();
addPropertyIfNotNull(configBuilder, "snapshot.locking.mode", snapshotLockingMode);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.drop.on.stop", logMiningBufferDropOnStop);
addPropertyIfNotNull(configBuilder, "message.key.columns", messageKeyColumns);
addPropertyIfNotNull(configBuilder, "transaction.metadata.factory", transactionMetadataFactory);
addPropertyIfNotNull(configBuilder, "custom.metric.tags", customMetricTags);
addPropertyIfNotNull(configBuilder, "openlogreplicator.host", openlogreplicatorHost);
addPropertyIfNotNull(configBuilder, "signal.enabled.channels", signalEnabledChannels);
addPropertyIfNotNull(configBuilder, "include.schema.changes", includeSchemaChanges);
addPropertyIfNotNull(configBuilder, "log.mining.include.redo.sql", logMiningIncludeRedoSql);
addPropertyIfNotNull(configBuilder, "signal.data.collection", signalDataCollection);
addPropertyIfNotNull(configBuilder, "log.mining.readonly.hostname", logMiningReadonlyHostname);
addPropertyIfNotNull(configBuilder, "converters", converters);
addPropertyIfNotNull(configBuilder, "snapshot.fetch.size", snapshotFetchSize);
addPropertyIfNotNull(configBuilder, "openlineage.integration.job.tags", openlineageIntegrationJobTags);
addPropertyIfNotNull(configBuilder, "snapshot.lock.timeout.ms", snapshotLockTimeoutMs);
addPropertyIfNotNull(configBuilder, "log.mining.scn.gap.detection.gap.size.min", logMiningScnGapDetectionGapSizeMin);
addPropertyIfNotNull(configBuilder, "database.dbname", databaseDbname);
addPropertyIfNotNull(configBuilder, "snapshot.tables.order.by.row.count", snapshotTablesOrderByRowCount);
addPropertyIfNotNull(configBuilder, "log.mining.sleep.time.default.ms", logMiningSleepTimeDefaultMs);
addPropertyIfNotNull(configBuilder, "snapshot.select.statement.overrides", snapshotSelectStatementOverrides);
addPropertyIfNotNull(configBuilder, "log.mining.batch.size.increment", logMiningBatchSizeIncrement);
addPropertyIfNotNull(configBuilder, "log.mining.archive.log.only.scn.poll.interval.ms", logMiningArchiveLogOnlyScnPollIntervalMs);
addPropertyIfNotNull(configBuilder, "log.mining.restart.connection", logMiningRestartConnection);
addPropertyIfNotNull(configBuilder, "legacy.decimal.handling.strategy", legacyDecimalHandlingStrategy);
addPropertyIfNotNull(configBuilder, "table.exclude.list", tableExcludeList);
addPropertyIfNotNull(configBuilder, "max.batch.size", maxBatchSize);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.infinispan.cache.transactions", logMiningBufferInfinispanCacheTransactions);
addPropertyIfNotNull(configBuilder, "topic.naming.strategy", topicNamingStrategy);
addPropertyIfNotNull(configBuilder, "snapshot.mode", snapshotMode);
addPropertyIfNotNull(configBuilder, "snapshot.mode.configuration.based.snapshot.data", snapshotModeConfigurationBasedSnapshotData);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.ehcache.schemachanges.config", logMiningBufferEhcacheSchemachangesConfig);
addPropertyIfNotNull(configBuilder, "openlineage.integration.job.owners", openlineageIntegrationJobOwners);
addPropertyIfNotNull(configBuilder, "openlineage.integration.config.file.path", openlineageIntegrationConfigFilePath);
addPropertyIfNotNull(configBuilder, "retriable.restart.connector.wait.ms", retriableRestartConnectorWaitMs);
addPropertyIfNotNull(configBuilder, "snapshot.delay.ms", snapshotDelayMs);
addPropertyIfNotNull(configBuilder, "log.mining.strategy", logMiningStrategy);
addPropertyIfNotNull(configBuilder, "executor.shutdown.timeout.ms", executorShutdownTimeoutMs);
addPropertyIfNotNull(configBuilder, "snapshot.mode.configuration.based.snapshot.on.data.error", snapshotModeConfigurationBasedSnapshotOnDataError);
addPropertyIfNotNull(configBuilder, "schema.history.internal.file.filename", schemaHistoryInternalFileFilename);
addPropertyIfNotNull(configBuilder, "tombstones.on.delete", tombstonesOnDelete);
addPropertyIfNotNull(configBuilder, "decimal.handling.mode", decimalHandlingMode);
addPropertyIfNotNull(configBuilder, "binary.handling.mode", binaryHandlingMode);
addPropertyIfNotNull(configBuilder, "database.out.server.name", databaseOutServerName);
addPropertyIfNotNull(configBuilder, "openlineage.integration.dataset.kafka.bootstrap.servers", openlineageIntegrationDatasetKafkaBootstrapServers);
addPropertyIfNotNull(configBuilder, "archive.log.hours", archiveLogHours);
addPropertyIfNotNull(configBuilder, "snapshot.include.collection.list", snapshotIncludeCollectionList);
addPropertyIfNotNull(configBuilder, "snapshot.mode.configuration.based.start.stream", snapshotModeConfigurationBasedStartStream);
addPropertyIfNotNull(configBuilder, "database.pdb.name", databasePdbName);
addPropertyIfNotNull(configBuilder, "database.connection.adapter", databaseConnectionAdapter);
addPropertyIfNotNull(configBuilder, "log.mining.flush.table.name", logMiningFlushTableName);
addPropertyIfNotNull(configBuilder, "openlogreplicator.source", openlogreplicatorSource);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.type", logMiningBufferType);
addPropertyIfNotNull(configBuilder, "signal.poll.interval.ms", signalPollIntervalMs);
addPropertyIfNotNull(configBuilder, "notification.enabled.channels", notificationEnabledChannels);
addPropertyIfNotNull(configBuilder, "event.processing.failure.handling.mode", eventProcessingFailureHandlingMode);
addPropertyIfNotNull(configBuilder, "snapshot.max.threads", snapshotMaxThreads);
addPropertyIfNotNull(configBuilder, "notification.sink.topic.name", notificationSinkTopicName);
addPropertyIfNotNull(configBuilder, "snapshot.mode.custom.name", snapshotModeCustomName);
addPropertyIfNotNull(configBuilder, "log.mining.query.filter.mode", logMiningQueryFilterMode);
addPropertyIfNotNull(configBuilder, "schema.name.adjustment.mode", schemaNameAdjustmentMode);
addPropertyIfNotNull(configBuilder, "log.mining.batch.size.default", logMiningBatchSizeDefault);
addPropertyIfNotNull(configBuilder, "table.include.list", tableIncludeList);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.ehcache.processedtransactions.config", logMiningBufferEhcacheProcessedtransactionsConfig);
addPropertyIfNotNull(configBuilder, "streaming.delay.ms", streamingDelayMs);
addPropertyIfNotNull(configBuilder, "openlineage.integration.job.namespace", openlineageIntegrationJobNamespace);
addPropertyIfNotNull(configBuilder, "database.query.timeout.ms", databaseQueryTimeoutMs);
addPropertyIfNotNull(configBuilder, "query.fetch.size", queryFetchSize);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.ehcache.global.config", logMiningBufferEhcacheGlobalConfig);
addPropertyIfNotNull(configBuilder, "log.mining.sleep.time.min.ms", logMiningSleepTimeMinMs);
addPropertyIfNotNull(configBuilder, "unavailable.value.placeholder", unavailableValuePlaceholder);
addPropertyIfNotNull(configBuilder, "log.mining.clientid.include.list", logMiningClientidIncludeList);
addPropertyIfNotNull(configBuilder, "heartbeat.action.query", heartbeatActionQuery);
addPropertyIfNotNull(configBuilder, "log.mining.clientid.exclude.list", logMiningClientidExcludeList);
addPropertyIfNotNull(configBuilder, "poll.interval.ms", pollIntervalMs);
addPropertyIfNotNull(configBuilder, "guardrail.collections.max", guardrailCollectionsMax);
addPropertyIfNotNull(configBuilder, "log.mining.username.include.list", logMiningUsernameIncludeList);
addPropertyIfNotNull(configBuilder, "lob.enabled", lobEnabled);
addPropertyIfNotNull(configBuilder, "interval.handling.mode", intervalHandlingMode);
addPropertyIfNotNull(configBuilder, "heartbeat.topics.prefix", heartbeatTopicsPrefix);
addPropertyIfNotNull(configBuilder, "log.mining.archive.log.only.mode", logMiningArchiveLogOnlyMode);
addPropertyIfNotNull(configBuilder, "log.mining.path.dictionary", logMiningPathDictionary);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.infinispan.cache.schema_changes", logMiningBufferInfinispanCacheSchemaChanges);
addPropertyIfNotNull(configBuilder, "log.mining.sleep.time.max.ms", logMiningSleepTimeMaxMs);
addPropertyIfNotNull(configBuilder, "database.user", databaseUser);
addPropertyIfNotNull(configBuilder, "datatype.propagate.source.type", datatypePropagateSourceType);
addPropertyIfNotNull(configBuilder, "incremental.snapshot.watermarking.strategy", incrementalSnapshotWatermarkingStrategy);
addPropertyIfNotNull(configBuilder, "heartbeat.interval.ms", heartbeatIntervalMs);
addPropertyIfNotNull(configBuilder, "snapshot.mode.configuration.based.snapshot.on.schema.error", snapshotModeConfigurationBasedSnapshotOnSchemaError);
addPropertyIfNotNull(configBuilder, "schema.history.internal.skip.unparseable.ddl", schemaHistoryInternalSkipUnparseableDdl);
addPropertyIfNotNull(configBuilder, "column.include.list", columnIncludeList);
addPropertyIfNotNull(configBuilder, "log.mining.username.exclude.list", logMiningUsernameExcludeList);
addPropertyIfNotNull(configBuilder, "column.propagate.source.type", columnPropagateSourceType);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.ehcache.transactions.config", logMiningBufferEhcacheTransactionsConfig);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.infinispan.cache.processed_transactions", logMiningBufferInfinispanCacheProcessedTransactions);
addPropertyIfNotNull(configBuilder, "errors.max.retries", errorsMaxRetries);
addPropertyIfNotNull(configBuilder, "database.password", databasePassword);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.infinispan.cache.events", logMiningBufferInfinispanCacheEvents);
addPropertyIfNotNull(configBuilder, "skipped.operations", skippedOperations);
addPropertyIfNotNull(configBuilder, "openlineage.integration.job.description", openlineageIntegrationJobDescription);
addPropertyIfNotNull(configBuilder, "archive.destination.name", archiveDestinationName);
addPropertyIfNotNull(configBuilder, "log.mining.scn.gap.detection.time.interval.max.ms", logMiningScnGapDetectionTimeIntervalMaxMs);
addPropertyIfNotNull(configBuilder, "extended.headers.enabled", extendedHeadersEnabled);
addPropertyIfNotNull(configBuilder, "max.queue.size", maxQueueSize);
addPropertyIfNotNull(configBuilder, "guardrail.collections.limit.action", guardrailCollectionsLimitAction);
addPropertyIfNotNull(configBuilder, "rac.nodes", racNodes);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.infinispan.cache.global", logMiningBufferInfinispanCacheGlobal);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.transaction.events.threshold", logMiningBufferTransactionEventsThreshold);
addPropertyIfNotNull(configBuilder, "log.mining.transaction.retention.ms", logMiningTransactionRetentionMs);
addPropertyIfNotNull(configBuilder, "provide.transaction.metadata", provideTransactionMetadata);
addPropertyIfNotNull(configBuilder, "schema.history.internal.store.only.captured.tables.ddl", schemaHistoryInternalStoreOnlyCapturedTablesDdl);
addPropertyIfNotNull(configBuilder, "schema.history.internal.store.only.captured.databases.ddl", schemaHistoryInternalStoreOnlyCapturedDatabasesDdl);
addPropertyIfNotNull(configBuilder, "snapshot.database.errors.max.retries", snapshotDatabaseErrorsMaxRetries);
addPropertyIfNotNull(configBuilder, "topic.prefix", topicPrefix);
addPropertyIfNotNull(configBuilder, "include.schema.comments", includeSchemaComments);
addPropertyIfNotNull(configBuilder, "sourceinfo.struct.maker", sourceinfoStructMaker);
addPropertyIfNotNull(configBuilder, "openlineage.integration.enabled", openlineageIntegrationEnabled);
addPropertyIfNotNull(configBuilder, "openlogreplicator.port", openlogreplicatorPort);
addPropertyIfNotNull(configBuilder, "log.mining.buffer.ehcache.events.config", logMiningBufferEhcacheEventsConfig);
addPropertyIfNotNull(configBuilder, "log.mining.batch.size.max", logMiningBatchSizeMax);
addPropertyIfNotNull(configBuilder, "max.queue.size.in.bytes", maxQueueSizeInBytes);
addPropertyIfNotNull(configBuilder, "database.url", databaseUrl);
addPropertyIfNotNull(configBuilder, "snapshot.mode.configuration.based.snapshot.schema", snapshotModeConfigurationBasedSnapshotSchema);
addPropertyIfNotNull(configBuilder, "time.precision.mode", timePrecisionMode);
addPropertyIfNotNull(configBuilder, "post.processors", postProcessors);
addPropertyIfNotNull(configBuilder, "database.port", databasePort);
addPropertyIfNotNull(configBuilder, "log.mining.sleep.time.increment.ms", logMiningSleepTimeIncrementMs);
addPropertyIfNotNull(configBuilder, "schema.history.internal", schemaHistoryInternal);
addPropertyIfNotNull(configBuilder, "column.exclude.list", columnExcludeList);
addPropertyIfNotNull(configBuilder, "log.mining.session.max.ms", logMiningSessionMaxMs);
addPropertyIfNotNull(configBuilder, "database.hostname", databaseHostname);
addPropertyIfNotNull(configBuilder, "log.mining.batch.size.min", logMiningBatchSizeMin);
addPropertyIfNotNull(configBuilder, "connection.validation.timeout.ms", connectionValidationTimeoutMs);
return configBuilder.build();
}
    @Override
    // Identifies the Debezium connector implementation this configuration drives.
    // NOTE(review): the raw 'Class' return type mirrors the superclass signature
    // (presumably Class<? extends SourceConnector>) — cannot be parameterized here
    // without breaking the override; confirm against the base class.
    protected Class configureConnectorClass() {
        return OracleConnector.class;
    }
@Override
protected ConfigurationValidation validateConnectorConfiguration() {
if (isFieldValueNotSet(databasePassword)) {
return ConfigurationValidation.notValid("Required field 'databasePassword' must be set.");
}
if (isFieldValueNotSet(topicPrefix)) {
return ConfigurationValidation.notValid("Required field 'topicPrefix' must be set.");
}
return ConfigurationValidation.valid();
}
    @Override
    // Returns the fixed database-type identifier for this connector ("oracle"),
    // used by callers to distinguish connector flavors.
    public String getConnectorDatabaseType() {
        return "oracle";
    }
} | that |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/SqlCallExpression.java | {
"start": 1207,
"end": 1516
} | class ____ into the {@code flink-table-api-java} module, however,
* since this expression is crucial for catalogs when defining persistable computed columns and
* watermark strategies, we keep it in {@code flink-table-common} to keep the dependencies of
* catalogs low.
*/
@PublicEvolving
public final | belongs |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-artifact/src/main/java/org/apache/maven/plugin/coreit/InstallArtifactsMojo.java | {
"start": 3993,
"end": 5555
} | class ____ implements ArtifactRepositoryLayout {
private static final char ARTIFACT_SEPARATOR = '-';
private static final char GROUP_SEPARATOR = '.';
@Override
public String getId() {
return "id";
}
public String pathOf(Artifact artifact) {
ArtifactHandler artifactHandler = artifact.getArtifactHandler();
StringBuilder path = new StringBuilder();
path.append(artifact.getArtifactId()).append(ARTIFACT_SEPARATOR).append(artifact.getVersion());
if (artifact.hasClassifier()) {
path.append(ARTIFACT_SEPARATOR).append(artifact.getClassifier());
}
if (artifactHandler.getExtension() != null
&& artifactHandler.getExtension().length() > 0) {
path.append(GROUP_SEPARATOR).append(artifactHandler.getExtension());
}
return path.toString();
}
public String pathOfLocalRepositoryMetadata(ArtifactMetadata metadata, ArtifactRepository repository) {
return pathOfRepositoryMetadata(metadata.getLocalFilename(repository));
}
private String pathOfRepositoryMetadata(String filename) {
StringBuilder path = new StringBuilder();
path.append(filename);
return path.toString();
}
public String pathOfRemoteRepositoryMetadata(ArtifactMetadata metadata) {
return pathOfRepositoryMetadata(metadata.getRemoteFilename());
}
}
}
| FlatRepositoryLayout |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/RequestCacheConfigurerTests.java | {
"start": 15304,
"end": 15664
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.anyRequest().authenticated())
.formLogin(withDefaults());
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static | RequestCacheDefaultsConfig |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/component/sql/SqlProducerReuseParameterTest.java | {
"start": 1444,
"end": 3081
} | class ____ extends CamelTestSupport {
private EmbeddedDatabase db;
@Override
public void doPreSetup() throws Exception {
db = new EmbeddedDatabaseBuilder()
.setName(getClass().getSimpleName())
.setType(EmbeddedDatabaseType.H2)
.addScript("sql/createAndPopulateDatabase.sql").build();
}
@Override
public void doPostTearDown() throws Exception {
if (db != null) {
db.shutdown();
}
}
@Test
public void testReuseParameter() {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
fluentTemplate.to("direct:start").withHeader("id", "2").withHeader("lic", "ASF").send();
List<?> received = assertIsInstanceOf(List.class, mock.getReceivedExchanges().get(0).getIn().getBody());
assertEquals(2, received.size());
Map<?, ?> row = assertIsInstanceOf(Map.class, received.get(0));
assertEquals("AMQ", row.get("PROJECT"));
row = assertIsInstanceOf(Map.class, received.get(1));
assertEquals("Linux", row.get("PROJECT"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
getContext().getComponent("sql", SqlComponent.class).setDataSource(db);
from("direct:start")
.to("sql:select * from projects where (id = :#id and license = :#lic) or (id > :#id) order by project")
.to("mock:result");
}
};
}
}
| SqlProducerReuseParameterTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/version/UserVersionTest.java | {
"start": 2188,
"end": 2700
} | class ____ implements Serializable {
private final long rev;
public CustomVersion(long rev) {
this.rev = rev;
}
public long getRev() {
return rev;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
CustomVersion that = (CustomVersion) o;
return rev == that.rev;
}
@Override
public int hashCode() {
return Objects.hash( rev );
}
}
public static | CustomVersion |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/InputGateSpecUtils.java | {
"start": 1261,
"end": 6234
} | class ____ {
public static final int DEFAULT_MAX_REQUIRED_BUFFERS_PER_GATE_FOR_BATCH = 1000;
public static final int DEFAULT_MAX_REQUIRED_BUFFERS_PER_GATE_FOR_STREAM = Integer.MAX_VALUE;
public static GateBuffersSpec createGateBuffersSpec(
Optional<Integer> configuredMaxRequiredBuffersPerGate,
int configuredNetworkBuffersPerChannel,
int configuredFloatingNetworkBuffersPerGate,
ResultPartitionType partitionType,
int numInputChannels,
boolean enableTieredStorage) {
int maxRequiredBuffersThresholdPerGate =
getEffectiveMaxRequiredBuffersPerGate(
partitionType, configuredMaxRequiredBuffersPerGate, enableTieredStorage);
int targetRequiredBuffersPerGate =
getRequiredBuffersTargetPerGate(
numInputChannels, configuredNetworkBuffersPerChannel);
int targetTotalBuffersPerGate =
getTotalBuffersTargetPerGate(
numInputChannels,
configuredNetworkBuffersPerChannel,
configuredFloatingNetworkBuffersPerGate);
int requiredBuffersPerGate =
Math.min(maxRequiredBuffersThresholdPerGate, targetRequiredBuffersPerGate);
int effectiveExclusiveBuffersPerChannel =
getExclusiveBuffersPerChannel(
configuredNetworkBuffersPerChannel,
numInputChannels,
requiredBuffersPerGate);
int effectiveExclusiveBuffersPerGate =
getEffectiveExclusiveBuffersPerGate(
numInputChannels, effectiveExclusiveBuffersPerChannel);
int requiredFloatingBuffers = requiredBuffersPerGate - effectiveExclusiveBuffersPerGate;
int totalFloatingBuffers = targetTotalBuffersPerGate - effectiveExclusiveBuffersPerGate;
checkState(requiredFloatingBuffers > 0, "Must be positive.");
checkState(
requiredFloatingBuffers <= totalFloatingBuffers,
"Wrong number of floating buffers.");
return new GateBuffersSpec(
effectiveExclusiveBuffersPerChannel,
requiredFloatingBuffers,
totalFloatingBuffers,
targetTotalBuffersPerGate);
}
@VisibleForTesting
static int getEffectiveMaxRequiredBuffersPerGate(
ResultPartitionType partitionType,
Optional<Integer> configuredMaxRequiredBuffersPerGate,
boolean enableTieredStorage) {
return configuredMaxRequiredBuffersPerGate.orElseGet(
() ->
partitionType.isPipelinedOrPipelinedBoundedResultPartition()
// hybrid partition may calculate a backlog that is larger
// than the accurate value. If all buffers are floating, it
// will seriously affect the performance.
|| (partitionType.isHybridResultPartition()
&& !enableTieredStorage)
? DEFAULT_MAX_REQUIRED_BUFFERS_PER_GATE_FOR_STREAM
: DEFAULT_MAX_REQUIRED_BUFFERS_PER_GATE_FOR_BATCH);
}
/**
* Since at least one floating buffer is required, the number of required buffers is reduced by
* 1, and then the average number of buffers per channel is calculated. Returning the minimum
* value to ensure that the number of required buffers per gate is not more than the given
* requiredBuffersPerGate.}.
*/
private static int getExclusiveBuffersPerChannel(
int configuredNetworkBuffersPerChannel,
int numInputChannels,
int requiredBuffersPerGate) {
checkArgument(numInputChannels > 0, "Must be positive.");
checkArgument(requiredBuffersPerGate >= 1, "Require at least 1 buffer per gate.");
return Math.min(
configuredNetworkBuffersPerChannel,
(requiredBuffersPerGate - 1) / numInputChannels);
}
private static int getRequiredBuffersTargetPerGate(
int numInputChannels, int configuredNetworkBuffersPerChannel) {
return numInputChannels * configuredNetworkBuffersPerChannel + 1;
}
private static int getTotalBuffersTargetPerGate(
int numInputChannels,
int configuredNetworkBuffersPerChannel,
int configuredFloatingBuffersPerGate) {
return numInputChannels * configuredNetworkBuffersPerChannel
+ configuredFloatingBuffersPerGate;
}
private static int getEffectiveExclusiveBuffersPerGate(
int numInputChannels, int effectiveExclusiveBuffersPerChannel) {
return effectiveExclusiveBuffersPerChannel * numInputChannels;
}
}
| InputGateSpecUtils |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patterntext/PatternTextValueProcessorTests.java | {
"start": 558,
"end": 7686
} | class ____ extends ESTestCase {
public void testEmpty() throws IOException {
String text = "";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals(text, parts.template());
assertTrue(parts.args().isEmpty());
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testWhitespace() throws IOException {
String text = " ";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals(text, parts.template());
assertTrue(parts.args().isEmpty());
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testWithoutTimestamp() throws IOException {
String text = " some text with arg1 and 2arg2 and 333 ";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals(" some text with and and ", parts.template());
assertThat(parts.args(), Matchers.contains("arg1", "2arg2", "333"));
assertThat(parts.argsInfo(), equalTo(info(16, 21, 26)));
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testWithTimestamp() throws IOException {
String text = " 2021-04-13T13:51:38.000Z some text with arg1 and arg2 and arg3";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals(" some text with and and ", parts.template());
assertThat(parts.args(), Matchers.contains("2021-04-13T13:51:38.000Z", "arg1", "arg2", "arg3"));
assertThat(parts.argsInfo(), equalTo(info(1, 17, 22, 27)));
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testWithDateSpaceTime() throws IOException {
String text = " 2021-04-13 13:51:38 some text with arg1 and arg2 and arg3";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals(" some text with and and ", parts.template());
assertThat(parts.argsInfo(), equalTo(info(1, 2, 18, 23, 28)));
assertThat(parts.args(), Matchers.contains("2021-04-13", "13:51:38", "arg1", "arg2", "arg3"));
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testMalformedDate() throws IOException {
String text = "2020/09/06 10:11:38 Using namespace: kubernetes-dashboard' | HTTP status: 400, message: [1:395]";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals(" Using namespace: kubernetes-dashboard' | HTTP status: message: []", parts.template());
assertThat(parts.argsInfo(), equalTo(info(0, 1, 56, 67)));
assertThat(parts.args(), Matchers.contains("2020/09/06", "10:11:38", "400,", "1:395"));
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testUUID() throws IOException {
String text = "[2020-08-18T00:58:56.751+00:00][15][2354][action_controller][INFO]: [18be2355-6306-4a00-9db9-f0696aa1a225] "
+ "some text with arg1 and arg2";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals("[][][][action_controller][INFO]: [] some text with and ", parts.template());
assertThat(parts.argsInfo(), equalTo(info(1, 3, 5, 34, 51, 56)));
assertThat(
parts.args(),
Matchers.contains("2020-08-18T00:58:56.751+00:00", "15", "2354", "18be2355-6306-4a00-9db9-f0696aa1a225", "arg1", "arg2")
);
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testIP() throws IOException {
String text = "[2020-08-18T00:58:56.751+00:00][15][2354][action_controller][INFO]: from 94.168.152.150 and arg1";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals("[][][][action_controller][INFO]: from and ", parts.template());
assertThat(parts.argsInfo(), equalTo(info(1, 3, 5, 38, 43)));
assertThat(parts.args(), Matchers.contains("2020-08-18T00:58:56.751+00:00", "15", "2354", "94.168.152.150", "arg1"));
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testSecondDate() throws IOException {
String text = "[2020-08-18T00:58:56.751+00:00][15][2354][action_controller][INFO]: at 2020-08-18 00:58:56 +0000 and arg1";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals("[][][][action_controller][INFO]: at and ", parts.template());
assertThat(parts.argsInfo(), equalTo(info(1, 3, 5, 36, 37, 38, 43)));
assertThat(
parts.args(),
Matchers.contains("2020-08-18T00:58:56.751+00:00", "15", "2354", "2020-08-18", "00:58:56", "+0000", "arg1")
);
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testWithTimestampStartBrackets() throws IOException {
String text = "[2020-08-18T00:58:56] Found 123 errors for service [cheddar1]";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals("[] Found errors for service []", parts.template());
assertThat(parts.argsInfo(), equalTo(info(1, 9, 30)));
assertThat(parts.args(), Matchers.contains("2020-08-18T00:58:56", "123", "cheddar1"));
assertEquals(text, PatternTextValueProcessor.merge(parts));
}
public void testTemplateIdIsExpectedShape() throws IOException {
String text = "[2020-08-18T00:58:56] Found 123 errors for service [cheddar1]";
PatternTextValueProcessor.Parts parts = PatternTextValueProcessor.split(text);
assertEquals("1l_PtCLQ5xY", parts.templateId());
}
public void testTemplateIdHasVeryFewCollisions() throws IOException {
Set<String> templates = new HashSet<>();
Set<String> ids = new HashSet<>();
for (int i = 0; i < 1000; i++) {
var template = randomTemplate();
var parts = new PatternTextValueProcessor.Parts(template, List.of(), List.of());
templates.add(template);
ids.add(parts.templateId());
}
// This can technically fail due to hash collision, but it should happen quite rarely.
assertEquals(templates.size(), ids.size());
}
private static String randomTemplate() {
StringBuilder sb = new StringBuilder();
int numTokens = randomIntBetween(1, 20);
for (int i = 0; i < numTokens; i++) {
var token = randomBoolean() ? randomAlphaOfLength(between(1, 10)) : "";
sb.append(token);
sb.append(randomDelimiter());
}
return sb.toString();
}
private static String randomDelimiter() {
return randomFrom(List.of(" ", "\n", "\t", "[", "]"));
}
private static List<Arg.Info> info(int... offsets) throws IOException {
return Arrays.stream(offsets).mapToObj(o -> new Arg.Info(Arg.Type.GENERIC, o)).toList();
}
}
| PatternTextValueProcessorTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/desktop/ConstructorPropertiesAnnotationTest.java | {
"start": 593,
"end": 1315
} | class ____ {
// 08-Nov-2015, tatu: Note that in real code we would most likely use same
// names for properties; but here we use different name on purpose to
// ensure that Jackson has no way of binding JSON properties "x" and "y"
// using any other mechanism than via `@ConstructorProperties` annotation
public int _x, _y;
@ConstructorProperties({"x", "y"})
// Same as above; use differing local parameter names so that parameter name
// introspection cannot be used as the source of property names.
public Issue905Bean(int a, int b) {
_x = a;
_y = b;
}
}
// for [databind#1122]
static | Issue905Bean |
java | elastic__elasticsearch | modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java | {
"start": 1388,
"end": 1802
} | class ____ extends AbstractInstrument.Builder<LongUpDownCounter> {
private Builder(String name, String description, String unit) {
super(name, description, unit);
}
@Override
public LongUpDownCounter build(Meter meter) {
return Objects.requireNonNull(meter).upDownCounterBuilder(name).setDescription(description).setUnit(unit).build();
}
}
}
| Builder |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobClientHeartbeatParameters.java | {
"start": 1022,
"end": 1585
} | class ____ extends MessageParameters {
private final JobIDPathParameter jobPathParameter = new JobIDPathParameter();
@Override
public Collection<MessagePathParameter<?>> getPathParameters() {
return Collections.singleton(jobPathParameter);
}
@Override
public Collection<MessageQueryParameter<?>> getQueryParameters() {
return Collections.emptyList();
}
public JobClientHeartbeatParameters resolveJobId(JobID jobId) {
jobPathParameter.resolve(jobId);
return this;
}
}
| JobClientHeartbeatParameters |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/BindToRegistryBeanInitDestroyMethodServiceTest.java | {
"start": 2685,
"end": 3100
} | class ____ implements Service {
private String message;
public FooService(String message) {
this.message = message;
}
public String getMessage() {
return message;
}
public void start() {
this.message = "Started " + message;
}
public void stop() {
this.message = "Stopped";
}
}
}
| FooService |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/OAuthBearerTokenCallback.java | {
"start": 1437,
"end": 4001
} | class ____ implements Callback {
private OAuthBearerToken token = null;
private String errorCode = null;
private String errorDescription = null;
private String errorUri = null;
/**
* Return the (potentially null) token
*
* @return the (potentially null) token
*/
public OAuthBearerToken token() {
return token;
}
/**
* Return the optional (but always non-empty if not null) error code as per
* <a href="https://tools.ietf.org/html/rfc6749#section-5.2">RFC 6749: The OAuth
* 2.0 Authorization Framework</a>.
*
* @return the optional (but always non-empty if not null) error code
*/
public String errorCode() {
return errorCode;
}
/**
* Return the (potentially null) error description as per
* <a href="https://tools.ietf.org/html/rfc6749#section-5.2">RFC 6749: The OAuth
* 2.0 Authorization Framework</a>.
*
* @return the (potentially null) error description
*/
public String errorDescription() {
return errorDescription;
}
/**
* Return the (potentially null) error URI as per
* <a href="https://tools.ietf.org/html/rfc6749#section-5.2">RFC 6749: The OAuth
* 2.0 Authorization Framework</a>.
*
* @return the (potentially null) error URI
*/
public String errorUri() {
return errorUri;
}
/**
* Set the token. All error-related values are cleared.
*
* @param token
* the optional token to set
*/
public void token(OAuthBearerToken token) {
this.token = token;
this.errorCode = null;
this.errorDescription = null;
this.errorUri = null;
}
/**
* Set the error values as per
* <a href="https://tools.ietf.org/html/rfc6749#section-5.2">RFC 6749: The OAuth
* 2.0 Authorization Framework</a>. Any token is cleared.
*
* @param errorCode
* the mandatory error code to set
* @param errorDescription
* the optional error description to set
* @param errorUri
* the optional error URI to set
*/
public void error(String errorCode, String errorDescription, String errorUri) {
if (Objects.requireNonNull(errorCode).isEmpty())
throw new IllegalArgumentException("error code must not be empty");
this.errorCode = errorCode;
this.errorDescription = errorDescription;
this.errorUri = errorUri;
this.token = null;
}
}
| OAuthBearerTokenCallback |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/logical/BinaryLogicProcessor.java | {
"start": 981,
"end": 3329
} | enum ____ implements PredicateBiFunction<Boolean, Boolean, Boolean> {
AND((l, r) -> {
if (Boolean.FALSE.equals(l) || Boolean.FALSE.equals(r)) {
return Boolean.FALSE;
}
if (l == null || r == null) {
return null;
}
return Boolean.logicalAnd(l.booleanValue(), r.booleanValue());
}, "AND"),
OR((l, r) -> {
if (Boolean.TRUE.equals(l) || Boolean.TRUE.equals(r)) {
return Boolean.TRUE;
}
if (l == null || r == null) {
return null;
}
return Boolean.logicalOr(l.booleanValue(), r.booleanValue());
}, "OR");
private final BiFunction<Boolean, Boolean, Boolean> process;
private final String symbol;
BinaryLogicOperation(BiFunction<Boolean, Boolean, Boolean> process, String symbol) {
this.process = process;
this.symbol = symbol;
}
@Override
public String symbol() {
return symbol;
}
@Override
public Boolean apply(Boolean left, Boolean right) {
return process.apply(left, right);
}
@Override
public final Boolean doApply(Boolean left, Boolean right) {
return null;
}
@Override
public String toString() {
return symbol;
}
}
public static final String NAME = "lb";
public BinaryLogicProcessor(Processor left, Processor right, BinaryLogicOperation operation) {
super(left, right, operation);
}
public BinaryLogicProcessor(StreamInput in) throws IOException {
super(in, i -> i.readEnum(BinaryLogicOperation.class));
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected void checkParameter(Object param) {
if (param != null && (param instanceof Boolean) == false) {
throw new QlIllegalArgumentException("A boolean is required; received {}", param);
}
}
@Override
public Object process(Object input) {
Object l = left().process(input);
checkParameter(l);
Object r = right().process(input);
checkParameter(r);
return doProcess(l, r);
}
}
| BinaryLogicOperation |
java | apache__camel | components/camel-azure/camel-azure-servicebus/src/generated/java/org/apache/camel/component/azure/servicebus/ServiceBusEndpointConfigurer.java | {
"start": 743,
"end": 14047
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
ServiceBusEndpoint target = (ServiceBusEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "amqpretryoptions":
case "amqpRetryOptions": target.getConfiguration().setAmqpRetryOptions(property(camelContext, com.azure.core.amqp.AmqpRetryOptions.class, value)); return true;
case "amqptransporttype":
case "amqpTransportType": target.getConfiguration().setAmqpTransportType(property(camelContext, com.azure.core.amqp.AmqpTransportType.class, value)); return true;
case "binary": target.getConfiguration().setBinary(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "clientoptions":
case "clientOptions": target.getConfiguration().setClientOptions(property(camelContext, com.azure.core.util.ClientOptions.class, value)); return true;
case "connectionstring":
case "connectionString": target.getConfiguration().setConnectionString(property(camelContext, java.lang.String.class, value)); return true;
case "credentialtype":
case "credentialType": target.getConfiguration().setCredentialType(property(camelContext, org.apache.camel.component.azure.servicebus.CredentialType.class, value)); return true;
case "enabledeadlettering":
case "enableDeadLettering": target.getConfiguration().setEnableDeadLettering(property(camelContext, boolean.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "fullyqualifiednamespace":
case "fullyQualifiedNamespace": target.getConfiguration().setFullyQualifiedNamespace(property(camelContext, java.lang.String.class, value)); return true;
case "headerfilterstrategy":
case "headerFilterStrategy": target.getConfiguration().setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "maxautolockrenewduration":
case "maxAutoLockRenewDuration": target.getConfiguration().setMaxAutoLockRenewDuration(property(camelContext, java.time.Duration.class, value).toMillis()); return true;
case "maxconcurrentcalls":
case "maxConcurrentCalls": target.getConfiguration().setMaxConcurrentCalls(property(camelContext, int.class, value)); return true;
case "prefetchcount":
case "prefetchCount": target.getConfiguration().setPrefetchCount(property(camelContext, int.class, value)); return true;
case "processorclient":
case "processorClient": target.getConfiguration().setProcessorClient(property(camelContext, com.azure.messaging.servicebus.ServiceBusProcessorClient.class, value)); return true;
case "produceroperation":
case "producerOperation": target.getConfiguration().setProducerOperation(property(camelContext, org.apache.camel.component.azure.servicebus.ServiceBusProducerOperationDefinition.class, value)); return true;
case "proxyoptions":
case "proxyOptions": target.getConfiguration().setProxyOptions(property(camelContext, com.azure.core.amqp.ProxyOptions.class, value)); return true;
case "scheduledenqueuetime":
case "scheduledEnqueueTime": target.getConfiguration().setScheduledEnqueueTime(property(camelContext, java.time.OffsetDateTime.class, value)); return true;
case "senderclient":
case "senderClient": target.getConfiguration().setSenderClient(property(camelContext, com.azure.messaging.servicebus.ServiceBusSenderClient.class, value)); return true;
case "servicebusreceivemode":
case "serviceBusReceiveMode": target.getConfiguration().setServiceBusReceiveMode(property(camelContext, com.azure.messaging.servicebus.models.ServiceBusReceiveMode.class, value)); return true;
case "servicebustransactioncontext":
case "serviceBusTransactionContext": target.getConfiguration().setServiceBusTransactionContext(property(camelContext, com.azure.messaging.servicebus.ServiceBusTransactionContext.class, value)); return true;
case "servicebustype":
case "serviceBusType": target.getConfiguration().setServiceBusType(property(camelContext, org.apache.camel.component.azure.servicebus.ServiceBusType.class, value)); return true;
case "sessionenabled":
case "sessionEnabled": target.getConfiguration().setSessionEnabled(property(camelContext, boolean.class, value)); return true;
case "sessionid":
case "sessionId": target.getConfiguration().setSessionId(property(camelContext, java.lang.String.class, value)); return true;
case "subqueue":
case "subQueue": target.getConfiguration().setSubQueue(property(camelContext, com.azure.messaging.servicebus.models.SubQueue.class, value)); return true;
case "subscriptionname":
case "subscriptionName": target.getConfiguration().setSubscriptionName(property(camelContext, java.lang.String.class, value)); return true;
case "tokencredential":
case "tokenCredential": target.getConfiguration().setTokenCredential(property(camelContext, com.azure.core.credential.TokenCredential.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"processorClient", "senderClient", "tokenCredential"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "amqpretryoptions":
case "amqpRetryOptions": return com.azure.core.amqp.AmqpRetryOptions.class;
case "amqptransporttype":
case "amqpTransportType": return com.azure.core.amqp.AmqpTransportType.class;
case "binary": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "clientoptions":
case "clientOptions": return com.azure.core.util.ClientOptions.class;
case "connectionstring":
case "connectionString": return java.lang.String.class;
case "credentialtype":
case "credentialType": return org.apache.camel.component.azure.servicebus.CredentialType.class;
case "enabledeadlettering":
case "enableDeadLettering": return boolean.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "fullyqualifiednamespace":
case "fullyQualifiedNamespace": return java.lang.String.class;
case "headerfilterstrategy":
case "headerFilterStrategy": return org.apache.camel.spi.HeaderFilterStrategy.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "maxautolockrenewduration":
case "maxAutoLockRenewDuration": return long.class;
case "maxconcurrentcalls":
case "maxConcurrentCalls": return int.class;
case "prefetchcount":
case "prefetchCount": return int.class;
case "processorclient":
case "processorClient": return com.azure.messaging.servicebus.ServiceBusProcessorClient.class;
case "produceroperation":
case "producerOperation": return org.apache.camel.component.azure.servicebus.ServiceBusProducerOperationDefinition.class;
case "proxyoptions":
case "proxyOptions": return com.azure.core.amqp.ProxyOptions.class;
case "scheduledenqueuetime":
case "scheduledEnqueueTime": return java.time.OffsetDateTime.class;
case "senderclient":
case "senderClient": return com.azure.messaging.servicebus.ServiceBusSenderClient.class;
case "servicebusreceivemode":
case "serviceBusReceiveMode": return com.azure.messaging.servicebus.models.ServiceBusReceiveMode.class;
case "servicebustransactioncontext":
case "serviceBusTransactionContext": return com.azure.messaging.servicebus.ServiceBusTransactionContext.class;
case "servicebustype":
case "serviceBusType": return org.apache.camel.component.azure.servicebus.ServiceBusType.class;
case "sessionenabled":
case "sessionEnabled": return boolean.class;
case "sessionid":
case "sessionId": return java.lang.String.class;
case "subqueue":
case "subQueue": return com.azure.messaging.servicebus.models.SubQueue.class;
case "subscriptionname":
case "subscriptionName": return java.lang.String.class;
case "tokencredential":
case "tokenCredential": return com.azure.core.credential.TokenCredential.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
ServiceBusEndpoint target = (ServiceBusEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "amqpretryoptions":
case "amqpRetryOptions": return target.getConfiguration().getAmqpRetryOptions();
case "amqptransporttype":
case "amqpTransportType": return target.getConfiguration().getAmqpTransportType();
case "binary": return target.getConfiguration().isBinary();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "clientoptions":
case "clientOptions": return target.getConfiguration().getClientOptions();
case "connectionstring":
case "connectionString": return target.getConfiguration().getConnectionString();
case "credentialtype":
case "credentialType": return target.getConfiguration().getCredentialType();
case "enabledeadlettering":
case "enableDeadLettering": return target.getConfiguration().isEnableDeadLettering();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "fullyqualifiednamespace":
case "fullyQualifiedNamespace": return target.getConfiguration().getFullyQualifiedNamespace();
case "headerfilterstrategy":
case "headerFilterStrategy": return target.getConfiguration().getHeaderFilterStrategy();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "maxautolockrenewduration":
case "maxAutoLockRenewDuration": return target.getConfiguration().getMaxAutoLockRenewDuration();
case "maxconcurrentcalls":
case "maxConcurrentCalls": return target.getConfiguration().getMaxConcurrentCalls();
case "prefetchcount":
case "prefetchCount": return target.getConfiguration().getPrefetchCount();
case "processorclient":
case "processorClient": return target.getConfiguration().getProcessorClient();
case "produceroperation":
case "producerOperation": return target.getConfiguration().getProducerOperation();
case "proxyoptions":
case "proxyOptions": return target.getConfiguration().getProxyOptions();
case "scheduledenqueuetime":
case "scheduledEnqueueTime": return target.getConfiguration().getScheduledEnqueueTime();
case "senderclient":
case "senderClient": return target.getConfiguration().getSenderClient();
case "servicebusreceivemode":
case "serviceBusReceiveMode": return target.getConfiguration().getServiceBusReceiveMode();
case "servicebustransactioncontext":
case "serviceBusTransactionContext": return target.getConfiguration().getServiceBusTransactionContext();
case "servicebustype":
case "serviceBusType": return target.getConfiguration().getServiceBusType();
case "sessionenabled":
case "sessionEnabled": return target.getConfiguration().isSessionEnabled();
case "sessionid":
case "sessionId": return target.getConfiguration().getSessionId();
case "subqueue":
case "subQueue": return target.getConfiguration().getSubQueue();
case "subscriptionname":
case "subscriptionName": return target.getConfiguration().getSubscriptionName();
case "tokencredential":
case "tokenCredential": return target.getConfiguration().getTokenCredential();
default: return null;
}
}
}
| ServiceBusEndpointConfigurer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/association/Kitchen.java | {
"start": 341,
"end": 645
} | class ____ {
@Id
@GeneratedValue
private Long id;
@OneToOne(mappedBy = "kitchen")
private Oven oven;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Oven getOven() {
return oven;
}
public void setOven(Oven oven) {
this.oven = oven;
}
}
| Kitchen |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/config/HandlersBeanDefinitionParserTests.java | {
"start": 13105,
"end": 13347
} | class ____ implements HandshakeHandler {
@Override
public boolean doHandshake(ServerHttpRequest request, ServerHttpResponse response,
WebSocketHandler wsHandler, Map<String, Object> attributes) {
return false;
}
}
| TestHandshakeHandler |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/autoconfigure/metrics/DataSourcePoolMetricsAutoConfiguration.java | {
"start": 2848,
"end": 3350
} | class ____ {
private static final String DATASOURCE_SUFFIX = "dataSource";
@Bean
DataSourcePoolMetadataMeterBinder dataSourcePoolMetadataMeterBinder(ConfigurableListableBeanFactory beanFactory,
ObjectProvider<DataSourcePoolMetadataProvider> metadataProviders) {
return new DataSourcePoolMetadataMeterBinder(SimpleAutowireCandidateResolver
.resolveAutowireCandidates(beanFactory, DataSource.class, false, true), metadataProviders);
}
static | DataSourcePoolMetadataMetricsConfiguration |
java | hibernate__hibernate-orm | local-build-plugins/src/main/java/org/hibernate/build/maven/embedder/RunMavenTask.java | {
"start": 453,
"end": 999
} | class ____ extends DefaultTask {
@ServiceReference
abstract Property<MavenEmbedderService> getMavenEmbedderService();
@Input
abstract Property<String> getGoals();
@Input
abstract ListProperty<String> getArguments();
@TaskAction
public void run() {
getMavenEmbedderService().get().execute( constructTaskAndArgs() );
}
private String[] constructTaskAndArgs() {
List<String> args = new ArrayList<String>();
args.add( getGoals().get() );
args.addAll( getArguments().get() );
return args.toArray(new String[0]);
}
}
| RunMavenTask |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/schedulers/SchedulerTest.java | {
"start": 1231,
"end": 8357
} | class ____ extends RxJavaTest {
@Test
public void defaultPeriodicTask() {
final int[] count = { 0 };
TestScheduler scheduler = new TestScheduler();
Disposable d = scheduler.schedulePeriodicallyDirect(new Runnable() {
@Override
public void run() {
count[0]++;
}
}, 100, 100, TimeUnit.MILLISECONDS);
assertEquals(0, count[0]);
assertFalse(d.isDisposed());
scheduler.advanceTimeBy(200, TimeUnit.MILLISECONDS);
assertEquals(2, count[0]);
d.dispose();
assertTrue(d.isDisposed());
scheduler.advanceTimeBy(200, TimeUnit.MILLISECONDS);
assertEquals(2, count[0]);
}
@Test
public void periodicDirectThrows() throws Throwable {
TestHelper.withErrorTracking(errors -> {
TestScheduler scheduler = new TestScheduler();
try {
scheduler.schedulePeriodicallyDirect(new Runnable() {
@Override
public void run() {
throw new TestException();
}
}, 100, 100, TimeUnit.MILLISECONDS);
scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
fail("Should have thrown!");
} catch (TestException expected) {
// expected
}
TestHelper.assertUndeliverable(errors, 0, TestException.class);
});
}
@Test
public void disposePeriodicDirect() {
final int[] count = { 0 };
TestScheduler scheduler = new TestScheduler();
Disposable d = scheduler.schedulePeriodicallyDirect(new Runnable() {
@Override
public void run() {
count[0]++;
}
}, 100, 100, TimeUnit.MILLISECONDS);
d.dispose();
assertEquals(0, count[0]);
assertTrue(d.isDisposed());
scheduler.advanceTimeBy(200, TimeUnit.MILLISECONDS);
assertEquals(0, count[0]);
assertTrue(d.isDisposed());
}
@Test
public void scheduleDirect() {
final int[] count = { 0 };
TestScheduler scheduler = new TestScheduler();
scheduler.scheduleDirect(new Runnable() {
@Override
public void run() {
count[0]++;
}
}, 100, TimeUnit.MILLISECONDS);
assertEquals(0, count[0]);
scheduler.advanceTimeBy(200, TimeUnit.MILLISECONDS);
assertEquals(1, count[0]);
}
@Test
public void disposeSelfPeriodicDirect() {
final int[] count = { 0 };
TestScheduler scheduler = new TestScheduler();
final SequentialDisposable sd = new SequentialDisposable();
Disposable d = scheduler.schedulePeriodicallyDirect(new Runnable() {
@Override
public void run() {
count[0]++;
sd.dispose();
}
}, 100, 100, TimeUnit.MILLISECONDS);
sd.set(d);
assertEquals(0, count[0]);
assertFalse(d.isDisposed());
scheduler.advanceTimeBy(400, TimeUnit.MILLISECONDS);
assertEquals(1, count[0]);
assertTrue(d.isDisposed());
}
@Test
public void disposeSelfPeriodic() {
final int[] count = { 0 };
TestScheduler scheduler = new TestScheduler();
Worker worker = scheduler.createWorker();
try {
final SequentialDisposable sd = new SequentialDisposable();
Disposable d = worker.schedulePeriodically(new Runnable() {
@Override
public void run() {
count[0]++;
sd.dispose();
}
}, 100, 100, TimeUnit.MILLISECONDS);
sd.set(d);
assertEquals(0, count[0]);
assertFalse(d.isDisposed());
scheduler.advanceTimeBy(400, TimeUnit.MILLISECONDS);
assertEquals(1, count[0]);
assertTrue(d.isDisposed());
} finally {
worker.dispose();
}
}
@Test
public void periodicDirectTaskRace() {
final TestScheduler scheduler = new TestScheduler();
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final Disposable d = scheduler.schedulePeriodicallyDirect(Functions.EMPTY_RUNNABLE, 1, 1, TimeUnit.MILLISECONDS);
Runnable r1 = new Runnable() {
@Override
public void run() {
d.dispose();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
scheduler.advanceTimeBy(1, TimeUnit.SECONDS);
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void periodicDirectTaskRaceIO() throws Exception {
final Scheduler scheduler = Schedulers.io();
for (int i = 0; i < 100; i++) {
final Disposable d = scheduler.schedulePeriodicallyDirect(
Functions.EMPTY_RUNNABLE, 0, 0, TimeUnit.MILLISECONDS);
Thread.sleep(1);
d.dispose();
}
}
@Test
public void scheduleDirectThrows() throws Exception {
List<Throwable> list = TestHelper.trackPluginErrors();
try {
Schedulers.io().scheduleDirect(new Runnable() {
@Override
public void run() {
throw new TestException();
}
});
Thread.sleep(250);
assertTrue(list.size() >= 1);
TestHelper.assertUndeliverable(list, 0, TestException.class, null);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void schedulersUtility() {
TestHelper.checkUtilityClass(Schedulers.class);
}
@Test
public void defaultSchedulePeriodicallyDirectRejects() {
Scheduler s = new Scheduler() {
@NonNull
@Override
public Worker createWorker() {
return new Worker() {
@NonNull
@Override
public Disposable schedule(@NonNull Runnable run, long delay, @NonNull TimeUnit unit) {
return EmptyDisposable.INSTANCE;
}
@Override
public void dispose() {
}
@Override
public boolean isDisposed() {
return false;
}
};
}
};
assertSame(EmptyDisposable.INSTANCE, s.schedulePeriodicallyDirect(Functions.EMPTY_RUNNABLE, 1, 1, TimeUnit.MILLISECONDS));
}
@Test
public void holders() {
assertNotNull(new Schedulers.ComputationHolder());
assertNotNull(new Schedulers.IoHolder());
assertNotNull(new Schedulers.NewThreadHolder());
assertNotNull(new Schedulers.SingleHolder());
}
static final | SchedulerTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java | {
"start": 3193,
"end": 3313
} | class ____ {
abstract AutoClass build();
}
}
// BUG: Diagnostic contains: Do not extend an @AutoValue.Builder | Builder |
java | apache__camel | components/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppMessage.java | {
"start": 1450,
"end": 3914
} | class ____ extends DefaultMessage {
private static final Logger LOG = LoggerFactory.getLogger(SmppMessage.class);
private Command command;
private SmppConfiguration configuration;
public SmppMessage(CamelContext camelContext, Command command, SmppConfiguration configuration) {
super(camelContext);
this.command = command;
this.configuration = configuration;
}
@Override
public SmppMessage newInstance() {
return new SmppMessage(getCamelContext(), null, this.configuration);
}
public boolean isAlertNotification() {
return command instanceof AlertNotification;
}
public boolean isDataSm() {
return command instanceof DataSm;
}
public boolean isDeliverSm() {
return command instanceof DeliverSm && !((DeliverSm) command).isSmscDeliveryReceipt();
}
public boolean isDeliveryReceipt() {
return command instanceof DeliverSm && ((DeliverSm) command).isSmscDeliveryReceipt();
}
@Override
protected Object createBody() {
if (command instanceof MessageRequest) {
MessageRequest msgRequest = (MessageRequest) command;
byte[] shortMessage = msgRequest.getShortMessage();
if (shortMessage == null || shortMessage.length == 0) {
return null;
}
Alphabet alphabet = Alphabet.parseDataCoding(msgRequest.getDataCoding());
if (SmppUtils.is8Bit(alphabet)) {
return shortMessage;
}
String encoding = ExchangeHelper.getCharsetName(getExchange(), false);
if (ObjectHelper.isEmpty(encoding) || !Charset.isSupported(encoding)) {
encoding = configuration.getEncoding();
}
try {
return new String(shortMessage, encoding);
} catch (UnsupportedEncodingException e) {
LOG.info("Unsupported encoding \"{}\". Using system default encoding.", encoding);
}
return new String(shortMessage);
}
return null;
}
@Override
public String toString() {
if (command != null) {
return "SmppMessage: " + command;
} else {
return "SmppMessage: " + getBody();
}
}
/**
* Returns the underlying jSMPP command
*
* @return command
*/
public Command getCommand() {
return command;
}
}
| SmppMessage |
java | apache__camel | components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/strategy/ExclusiveReadLockCheck.java | {
"start": 1148,
"end": 5646
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(ExclusiveReadLockCheck.class);
private final boolean fastExistsCheck;
private final long startTime;
private final long minAge;
private final long minLength;
private final StopWatch watch;
private long lastModified;
private long length;
public ExclusiveReadLockCheck(boolean fastExistsCheck, long minAge, long minLength) {
this.fastExistsCheck = fastExistsCheck;
this.startTime = new Date().getTime();
this.minAge = minAge;
this.minLength = minLength;
this.watch = new StopWatch();
this.lastModified = Long.MIN_VALUE;
this.length = Long.MIN_VALUE;
}
public boolean tryAcquireExclusiveReadLock(GenericFileOperations<FTPFile> operations, GenericFile<FTPFile> file) {
long newLastModified = 0;
long newLength = 0;
FTPFile[] files = getFtpFiles(operations, file);
LOG.trace("List files {} found {} files", file.getAbsoluteFilePath(), files.length);
for (FTPFile f : files) {
boolean match;
if (fastExistsCheck) {
// uses the absolute file path as well
match = f.getName().equals(file.getAbsoluteFilePath()) || f.getName().equals(file.getFileNameOnly());
} else {
match = f.getName().equals(file.getFileNameOnly());
}
if (match) {
newLength = f.getSize();
if (f.getTimestamp() != null) {
newLastModified = f.getTimestamp().getTimeInMillis();
}
}
}
LOG.trace("Previous last modified: {}, new last modified: {}", lastModified, newLastModified);
LOG.trace("Previous length: {}, new length: {}", length, newLength);
long newOlderThan = startTime + watch.taken() - minAge;
LOG.trace("New older than threshold: {}", newOlderThan);
if (isReadLockAcquired(lastModified, length, newLastModified, newLength, newOlderThan)) {
LOG.trace("Read lock acquired.");
return true;
}
lastModified = newLastModified;
length = newLength;
return false;
}
private FTPFile[] getFtpFiles(GenericFileOperations<FTPFile> operations, GenericFile<FTPFile> file) {
FTPFile[] files;
if (fastExistsCheck) {
// use the absolute file path to only pickup the file we want to
// check, this avoids expensive
// list operations if we have a lot of files in the directory
files = getFtpFilesFast(operations, file);
} else {
files = getFtpFilesByFilter(operations, file);
}
return files;
}
private FTPFile[] getFtpFilesByFilter(GenericFileOperations<FTPFile> operations, GenericFile<FTPFile> file) {
// fast option not enabled, so list the directory and filter the
// file name
String path = file.getParent();
if (path.equals("/") || path.equals("\\")) {
// special for root (= home) directory
LOG.trace(
"Using full directory listing in home directory to update file information. Consider enabling fastExistsCheck option.");
return operations.listFiles();
} else {
LOG.trace(
"Using full directory listing to update file information for {}. Consider enabling fastExistsCheck option.",
path);
return operations.listFiles(path);
}
}
private FTPFile[] getFtpFilesFast(GenericFileOperations<FTPFile> operations, GenericFile<FTPFile> file) {
String path = file.getAbsoluteFilePath();
if (path.equals("/") || path.equals("\\")) {
// special for root (= home) directory
LOG.trace("Using fast exists to update file information in home directory");
return operations.listFiles();
} else {
LOG.trace("Using fast exists to update file information for {}", path);
return operations.listFiles(path);
}
}
private boolean isReadLockAcquired(
long lastModified, long length, long newLastModified, long newLength, long newOlderThan) {
return newLength >= minLength && (minAge == 0 && newLastModified == lastModified && newLength == length
|| minAge != 0 && newLastModified < newOlderThan);
}
}
| ExclusiveReadLockCheck |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_with_records_Test.java | {
"start": 1009,
"end": 1131
} | class ____ extends WithComparingFieldsIntrospectionStrategyBaseTest {
@Nested
| RecursiveComparisonAssert_with_records_Test |
java | quarkusio__quarkus | extensions/security-jpa-reactive/deployment/src/test/java/io/quarkus/security/jpa/reactive/FormAuthJpaTestCase.java | {
"start": 600,
"end": 4484
} | class ____ {
private static final String APP_PROPS = "" +
"quarkus.datasource.db-kind=postgresql\n" +
"quarkus.datasource.username=${postgres.reactive.username}\n" +
"quarkus.datasource.password=${postgres.reactive.password}\n" +
"quarkus.datasource.reactive=true\n" +
"quarkus.datasource.reactive.url=${postgres.reactive.url}\n" +
"quarkus.hibernate-orm.sql-load-script=import.sql\n" +
"quarkus.hibernate-orm.schema-management.strategy=drop-and-create\n" +
"#quarkus.hibernate-orm.log.sql=true\n" +
"quarkus.http.auth.form.enabled=true\n" +
"quarkus.http.auth.form.login-page=login\n" +
"quarkus.http.auth.form.error-page=error\n" +
"quarkus.http.auth.form.landing-page=landing\n" +
"quarkus.http.auth.policy.r1.roles-allowed=admin\n" +
"quarkus.http.auth.permission.roles1.paths=/admin%E2%9D%A4\n" +
"quarkus.http.auth.permission.roles1.policy=r1\n" +
"quarkus.http.auth.form.timeout=PT2S\n" +
"quarkus.http.auth.form.new-cookie-interval=PT1S\n" +
"quarkus.http.auth.form.cookie-name=laitnederc-sukrauq\n" +
"quarkus.http.auth.session.encryption-key=CHANGEIT-CHANGEIT-CHANGEIT-CHANGEIT-CHANGEIT\n";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(SingleRoleSecuredResource.class, TestApplication.class, RolesEndpointClassLevel.class,
ParametrizedPathsResource.class, SubjectExposingResource.class, MinimalUserEntity.class)
.addAsResource("minimal-config/import.sql", "import.sql")
.addAsResource(new StringAsset(APP_PROPS), "application.properties");
}
});
@Test
public void testFormBasedAuthSuccess() {
RestAssured.enableLoggingOfRequestAndResponseIfValidationFails();
CookieFilter cookies = new CookieFilter();
RestAssured
.given()
.filter(cookies)
.redirects().follow(false)
.when()
.get("/jaxrs-secured/user-secured")
.then()
.assertThat()
.statusCode(302)
.header("location", containsString("/login"))
.cookie("quarkus-redirect-location", containsString("/user-secured"));
// test with a non-existent user
RestAssured
.given()
.filter(cookies)
.redirects().follow(false)
.when()
.formParam("j_username", "dummy")
.formParam("j_password", "dummy")
.post("/j_security_check")
.then()
.assertThat()
.statusCode(302);
RestAssured
.given()
.filter(cookies)
.redirects().follow(false)
.when()
.formParam("j_username", "user")
.formParam("j_password", "user")
.post("/j_security_check")
.then()
.assertThat()
.statusCode(302)
.header("location", containsString("/user-secured"))
.cookie("laitnederc-sukrauq", notNullValue());
RestAssured
.given()
.filter(cookies)
.redirects().follow(false)
.when()
.get("/jaxrs-secured/user-secured")
.then()
.assertThat()
.statusCode(200)
.body(equalTo("A secured message"));
}
}
| FormAuthJpaTestCase |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1153/ErroneousIssue1153Mapper.java | {
"start": 860,
"end": 1197
} | class ____ {
//CHECKSTYLE:OFF
public String nested;
public String writable;
//CHECKSTYLE:ON
}
//CHECKSTYLE:OFF
public String nonNested;
public NestedSource nestedSource;
public NestedSource nestedSource2;
//CHECKSTYLE:ON
}
| NestedSource |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/query/KvStateRegistryTest.java | {
"start": 10455,
"end": 11706
} | class ____ implements KvStateRegistryListener {
private final Queue<JobID> stateRegisteredNotifications;
private final Queue<JobID> stateDeregisteredNotifications;
private TestingKvStateRegistryListener(
Queue<JobID> stateRegisteredNotifications,
Queue<JobID> stateDeregisteredNotifications) {
this.stateRegisteredNotifications = stateRegisteredNotifications;
this.stateDeregisteredNotifications = stateDeregisteredNotifications;
}
@Override
public void notifyKvStateRegistered(
JobID jobId,
JobVertexID jobVertexId,
KeyGroupRange keyGroupRange,
String registrationName,
KvStateID kvStateId) {
stateRegisteredNotifications.offer(jobId);
}
@Override
public void notifyKvStateUnregistered(
JobID jobId,
JobVertexID jobVertexId,
KeyGroupRange keyGroupRange,
String registrationName) {
stateDeregisteredNotifications.offer(jobId);
}
}
/** Testing implementation of {@link InternalKvState}. */
private static | TestingKvStateRegistryListener |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SystemUtils.java | {
"start": 10217,
"end": 10849
} | class ____ loaded, the value will be out of sync with that System property.
* </p>
*
* @see SystemProperties#getJavaExtDirs()
* @since Java 1.3
* @deprecated Deprecated without replacement.
*/
@Deprecated
public static final String JAVA_EXT_DIRS = SystemProperties.getJavaExtDirs();
/**
* A constant for the System Property {@code java.home}. Java installation directory.
*
* <p>
* Defaults to {@code null} if the runtime does not have security access to read this property or the property does not exist.
* </p>
* <p>
* This value is initialized when the | is |
java | google__guava | android/guava/src/com/google/common/io/LittleEndianDataInputStream.java | {
"start": 1625,
"end": 7483
} | class ____ extends FilterInputStream implements DataInput {
/**
* Creates a {@code LittleEndianDataInputStream} that wraps the given stream.
*
* @param in the stream to delegate to
*/
public LittleEndianDataInputStream(InputStream in) {
super(Preconditions.checkNotNull(in));
}
/** This method will throw an {@link UnsupportedOperationException}. */
@CanIgnoreReturnValue // to skip a line
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public String readLine() {
throw new UnsupportedOperationException("readLine is not supported");
}
@Override
public void readFully(byte[] b) throws IOException {
ByteStreams.readFully(this, b);
}
@Override
public void readFully(byte[] b, int off, int len) throws IOException {
ByteStreams.readFully(this, b, off, len);
}
@Override
public int skipBytes(int n) throws IOException {
return (int) in.skip(n);
}
@CanIgnoreReturnValue // to skip a byte
@Override
public int readUnsignedByte() throws IOException {
int b1 = in.read();
if (b1 < 0) {
throw new EOFException();
}
return b1;
}
/**
* Reads an unsigned {@code short} as specified by {@link DataInputStream#readUnsignedShort()},
* except using little-endian byte order.
*
* @return the next two bytes of the input stream, interpreted as an unsigned 16-bit integer in
* little-endian byte order
* @throws IOException if an I/O error occurs
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public int readUnsignedShort() throws IOException {
byte b1 = readAndCheckByte();
byte b2 = readAndCheckByte();
return Ints.fromBytes((byte) 0, (byte) 0, b2, b1);
}
/**
* Reads an integer as specified by {@link DataInputStream#readInt()}, except using little-endian
* byte order.
*
* @return the next four bytes of the input stream, interpreted as an {@code int} in little-endian
* byte order
* @throws IOException if an I/O error occurs
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public int readInt() throws IOException {
byte b1 = readAndCheckByte();
byte b2 = readAndCheckByte();
byte b3 = readAndCheckByte();
byte b4 = readAndCheckByte();
return Ints.fromBytes(b4, b3, b2, b1);
}
/**
* Reads a {@code long} as specified by {@link DataInputStream#readLong()}, except using
* little-endian byte order.
*
* @return the next eight bytes of the input stream, interpreted as a {@code long} in
* little-endian byte order
* @throws IOException if an I/O error occurs
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public long readLong() throws IOException {
byte b1 = readAndCheckByte();
byte b2 = readAndCheckByte();
byte b3 = readAndCheckByte();
byte b4 = readAndCheckByte();
byte b5 = readAndCheckByte();
byte b6 = readAndCheckByte();
byte b7 = readAndCheckByte();
byte b8 = readAndCheckByte();
return Longs.fromBytes(b8, b7, b6, b5, b4, b3, b2, b1);
}
/**
* Reads a {@code float} as specified by {@link DataInputStream#readFloat()}, except using
* little-endian byte order.
*
* @return the next four bytes of the input stream, interpreted as a {@code float} in
* little-endian byte order
* @throws IOException if an I/O error occurs
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public float readFloat() throws IOException {
return Float.intBitsToFloat(readInt());
}
/**
* Reads a {@code double} as specified by {@link DataInputStream#readDouble()}, except using
* little-endian byte order.
*
* @return the next eight bytes of the input stream, interpreted as a {@code double} in
* little-endian byte order
* @throws IOException if an I/O error occurs
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public double readDouble() throws IOException {
return Double.longBitsToDouble(readLong());
}
@CanIgnoreReturnValue // to skip a field
@Override
public String readUTF() throws IOException {
return new DataInputStream(in).readUTF();
}
/**
* Reads a {@code short} as specified by {@link DataInputStream#readShort()}, except using
* little-endian byte order.
*
* @return the next two bytes of the input stream, interpreted as a {@code short} in little-endian
* byte order.
* @throws IOException if an I/O error occurs.
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public short readShort() throws IOException {
return (short) readUnsignedShort();
}
/**
* Reads a char as specified by {@link DataInputStream#readChar()}, except using little-endian
* byte order.
*
* @return the next two bytes of the input stream, interpreted as a {@code char} in little-endian
* byte order
* @throws IOException if an I/O error occurs
*/
@CanIgnoreReturnValue // to skip some bytes
@Override
public char readChar() throws IOException {
return (char) readUnsignedShort();
}
@CanIgnoreReturnValue // to skip a byte
@Override
public byte readByte() throws IOException {
return (byte) readUnsignedByte();
}
@CanIgnoreReturnValue // to skip a byte
@Override
public boolean readBoolean() throws IOException {
return readUnsignedByte() != 0;
}
/**
* Reads a byte from the input stream checking that the end of file (EOF) has not been
* encountered.
*
* @return byte read from input
* @throws IOException if an error is encountered while reading
* @throws EOFException if the end of file (EOF) is encountered.
*/
private byte readAndCheckByte() throws IOException, EOFException {
int b1 = in.read();
if (b1 == -1) {
throw new EOFException();
}
return (byte) b1;
}
}
| LittleEndianDataInputStream |
java | apache__flink | flink-metrics/flink-metrics-graphite/src/main/java/org/apache/flink/metrics/graphite/GraphiteReporter.java | {
"start": 1209,
"end": 1367
} | class ____ as a factory for the {@link com.codahale.metrics.graphite.GraphiteReporter} and
* allows using it as a Flink reporter.
*/
@PublicEvolving
public | acts |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idclass/IdClassQueryTest.java | {
"start": 3830,
"end": 4634
} | class ____ {
@Id
protected String id;
@Column(name = "USERNAME")
protected String username;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
UserEntity that = (UserEntity) o;
return Objects.equals( id, that.id ) && Objects.equals( username, that.username );
}
@Override
public int hashCode() {
return Objects.hash( id, username );
}
}
@Entity(name = "FederatedIdentityEntity")
@IdClass(Key.class)
public static | UserEntity |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/Schedule.java | {
"start": 828,
"end": 919
} | interface ____ extends SchedulerEngine.Schedule, ToXContent {
String type();
| Schedule |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/float_/FloatAssert_isNegative_Test.java | {
"start": 882,
"end": 1190
} | class ____ extends FloatAssertBaseTest {
@Override
protected FloatAssert invoke_api_method() {
return assertions.isNegative();
}
@Override
protected void verify_internal_effects() {
verify(floats).assertIsNegative(getInfo(assertions), getActual(assertions));
}
}
| FloatAssert_isNegative_Test |
java | google__auto | value/src/test/java/com/google/auto/value/processor/GeneratedDoesNotExistTest.java | {
"start": 8108,
"end": 9567
} | class ____ extends Baz {",
" AutoValue_Baz() {",
" }",
"",
" @Override public String toString() {",
" return \"Baz{\"",
" + \"}\";",
" }",
"",
" @Override public boolean equals(Object o) {",
" if (o == this) {",
" return true;",
" }",
" if (o instanceof Baz) {",
" return true;",
" }",
" return false;",
" }",
"",
" @Override public int hashCode() {",
" int h$ = 1;",
" return h$;",
" }",
"}");
Set<String> ignoredGenerated = ConcurrentHashMap.newKeySet();
Processor autoValueProcessor = new AutoValueProcessor();
ProcessorHandler handler = new ProcessorHandler(autoValueProcessor, ignoredGenerated);
Processor noGeneratedProcessor = partialProxy(Processor.class, handler);
Compilation compilation =
javac()
.withOptions(javacOptions)
.withProcessors(noGeneratedProcessor)
.compile(javaFileObject);
assertThat(compilation).succeededWithoutWarnings();
assertThat(compilation)
.generatedSourceFile("foo.bar.AutoValue_Baz")
.hasSourceEquivalentTo(expectedOutput);
assertThat(ignoredGenerated).containsExactly(expectedAnnotation);
}
}
| AutoValue_Baz |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java | {
"start": 19715,
"end": 21836
} | class ____ extends ESMockClient {
SuccessfulESMockClient(ThreadPool threadPool, CircuitBreaker circuitBreaker, int expectedSearchRequestsCount) {
super(threadPool, circuitBreaker, expectedSearchRequestsCount);
}
@SuppressWarnings("unchecked")
@Override
<Response extends ActionResponse> void handleSearchRequest(ActionListener<Response> listener, SearchRequest searchRequest) {
int ordinal = searchRequest.source().terminateAfter();
SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal));
searchHit.sortValues(
new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW })
);
SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f);
SearchResponse response = new SearchResponse(
searchHits,
null,
null,
false,
false,
null,
0,
null,
2,
0,
0,
0,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY,
searchRequest.pointInTimeBuilder().getEncodedId()
);
if (searchRequestsRemainingCount() == 1) {
assertEquals(0, circuitBreaker.getUsed()); // this is the first response, so no memory usage so far
} else {
assertTrue(circuitBreaker.getUsed() > 0); // at this point the algorithm already started adding up to memory usage
}
ActionListener.respondAndRelease(listener, (Response) response);
}
}
/*
* For a failed sequence request, there'll be a successful first request followed by a second one that throws an exception
* the first search request is designed to return valid results to allow the tumbling window to start the algorithm
*/
private | SuccessfulESMockClient |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/DateCheckerTest.java | {
"start": 3364,
"end": 3680
} | class ____ {
Date good = new Date(120, JULY, 10);
}
""")
.doTest();
}
@Test
public void constructor_nonConstantMonth() {
helper
.addSourceLines(
"TestClass.java",
"""
import java.util.Date;
public | TestClass |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/annotation/JSONTypeAutoTypeCheckHandlerTest.java | {
"start": 730,
"end": 1123
} | class ____ implements ParserConfig.AutoTypeCheckHandler {
public Class<?> handler(String typeName, Class<?> expectClass, int features) {
if ("Cat".equals(typeName)) {
return Cat.class;
}
if ("Mouse".equals(typeName)) {
return Mouse.class;
}
return null;
}
}
}
| MyAutoTypeCheckHandler |
java | dropwizard__dropwizard | dropwizard-health/src/main/java/io/dropwizard/health/ShutdownNotifier.java | {
"start": 38,
"end": 120
} | interface ____ {
void notifyShutdownStarted() throws Exception;
}
| ShutdownNotifier |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/generated/GeneratedFilterCondition.java | {
"start": 1479,
"end": 1916
} | class ____ of the generated FilterCondition.
* @param code code of the generated FilterCondition.
* @param references referenced objects of the generated FilterCondition.
* @param conf configuration when generating FilterCondition.
*/
public GeneratedFilterCondition(
String className, String code, Object[] references, ReadableConfig conf) {
super(className, code, references, conf);
}
}
| name |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_noneMatch_with_description_Test.java | {
"start": 1046,
"end": 1592
} | class ____ extends AtomicReferenceArrayAssertBaseTest {
private Predicate<Object> predicate;
@BeforeEach
void beforeOnce() {
predicate = Objects::nonNull;
}
@Override
protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
return assertions.noneMatch(predicate, "custom");
}
@Override
protected void verify_internal_effects() {
verify(iterables).assertNoneMatch(info(), list(internalArray()), predicate, new PredicateDescription("custom"));
}
}
| AtomicReferenceArrayAssert_noneMatch_with_description_Test |
java | quarkusio__quarkus | integration-tests/narayana-stm/src/main/java/org/acme/quickstart/stm/FlightService.java | {
"start": 170,
"end": 268
} | interface ____ {
int getNumberOfBookings();
void makeBooking(String details);
}
| FlightService |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/entity/Address.java | {
"start": 315,
"end": 417
} | class ____ a property
*
* @author Emmanuel Bernard
*/
@Entity
@Table(name = "serial_address")
public | as |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNodesToLabelsResponsePBImpl.java | {
"start": 1622,
"end": 5186
} | class ____ extends
GetNodesToLabelsResponse {
GetNodesToLabelsResponseProto proto = GetNodesToLabelsResponseProto
.getDefaultInstance();
GetNodesToLabelsResponseProto.Builder builder = null;
boolean viaProto = false;
private Map<NodeId, Set<String>> nodeToLabels;
public GetNodesToLabelsResponsePBImpl() {
this.builder = GetNodesToLabelsResponseProto.newBuilder();
}
public GetNodesToLabelsResponsePBImpl(GetNodesToLabelsResponseProto proto) {
this.proto = proto;
this.viaProto = true;
}
private void initNodeToLabels() {
if (this.nodeToLabels != null) {
return;
}
GetNodesToLabelsResponseProtoOrBuilder p = viaProto ? proto : builder;
List<NodeIdToLabelsProto> list = p.getNodeToLabelsList();
this.nodeToLabels = new HashMap<NodeId, Set<String>>();
for (NodeIdToLabelsProto c : list) {
this.nodeToLabels.put(new NodeIdPBImpl(c.getNodeId()),
Sets.newHashSet(c.getNodeLabelsList()));
}
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetNodesToLabelsResponseProto.newBuilder(proto);
}
viaProto = false;
}
private void addNodeToLabelsToProto() {
maybeInitBuilder();
builder.clearNodeToLabels();
if (nodeToLabels == null) {
return;
}
Iterable<NodeIdToLabelsProto> iterable =
new Iterable<NodeIdToLabelsProto>() {
@Override
public Iterator<NodeIdToLabelsProto> iterator() {
return new Iterator<NodeIdToLabelsProto>() {
Iterator<Entry<NodeId, Set<String>>> iter = nodeToLabels
.entrySet().iterator();
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public NodeIdToLabelsProto next() {
Entry<NodeId, Set<String>> now = iter.next();
return NodeIdToLabelsProto.newBuilder()
.setNodeId(convertToProtoFormat(now.getKey()))
.addAllNodeLabels(now.getValue()).build();
}
@Override
public boolean hasNext() {
return iter.hasNext();
}
};
}
};
builder.addAllNodeToLabels(iterable);
}
private void mergeLocalToBuilder() {
if (this.nodeToLabels != null) {
addNodeToLabelsToProto();
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
public GetNodesToLabelsResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public Map<NodeId, Set<String>> getNodeToLabels() {
initNodeToLabels();
return this.nodeToLabels;
}
@Override
public void setNodeToLabels(Map<NodeId, Set<String>> map) {
initNodeToLabels();
nodeToLabels.clear();
nodeToLabels.putAll(map);
}
private NodeIdProto convertToProtoFormat(NodeId t) {
return ((NodeIdPBImpl)t).getProto();
}
@Override
public int hashCode() {
assert false : "hashCode not designed";
return 0;
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}
| GetNodesToLabelsResponsePBImpl |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/TypeUtil.java | {
"start": 1872,
"end": 9313
} | class ____ examine
* @return all declared fields for the given class (including superclasses).
* @see Class#getDeclaredFields()
*/
public static List<Field> getAllDeclaredFields(Class<?> cls) {
final List<Field> fields = new ArrayList<>();
while (cls != null) {
Collections.addAll(fields, cls.getDeclaredFields());
cls = cls.getSuperclass();
}
return fields;
}
/**
* Indicates if two {@link Type}s are assignment compatible.
*
* @param lhs the left hand side to check assignability to
* @param rhs the right hand side to check assignability from
* @return {@code true} if it is legal to assign a variable of type {@code rhs} to a variable of type {@code lhs}
* @see Class#isAssignableFrom(Class)
*/
public static boolean isAssignable(final Type lhs, final Type rhs) {
Objects.requireNonNull(lhs, "No left hand side type provided");
Objects.requireNonNull(rhs, "No right hand side type provided");
if (lhs.equals(rhs)) {
return true;
}
if (Object.class.equals(lhs)) {
// everything is assignable to Object
return true;
}
// raw type on left
if (lhs instanceof Class<?>) {
final Class<?> lhsClass = (Class<?>) lhs;
if (rhs instanceof Class<?>) {
// no generics involved
final Class<?> rhsClass = (Class<?>) rhs;
return lhsClass.isAssignableFrom(rhsClass);
}
if (rhs instanceof ParameterizedType) {
// check to see if the parameterized type has the same raw type as the lhs; this is legal
final Type rhsRawType = ((ParameterizedType) rhs).getRawType();
if (rhsRawType instanceof Class<?>) {
return lhsClass.isAssignableFrom((Class<?>) rhsRawType);
}
}
if (lhsClass.isArray() && rhs instanceof GenericArrayType) {
// check for compatible array component types
return isAssignable(lhsClass.getComponentType(), ((GenericArrayType) rhs).getGenericComponentType());
}
}
// parameterized type on left
if (lhs instanceof ParameterizedType) {
final ParameterizedType lhsType = (ParameterizedType) lhs;
if (rhs instanceof Class<?>) {
final Type lhsRawType = lhsType.getRawType();
if (lhsRawType instanceof Class<?>) {
return ((Class<?>) lhsRawType).isAssignableFrom((Class<?>) rhs);
}
} else if (rhs instanceof ParameterizedType) {
final ParameterizedType rhsType = (ParameterizedType) rhs;
return isParameterizedAssignable(lhsType, rhsType);
}
}
// generic array type on left
if (lhs instanceof GenericArrayType) {
final Type lhsComponentType = ((GenericArrayType) lhs).getGenericComponentType();
if (rhs instanceof Class<?>) {
// raw type on right
final Class<?> rhsClass = (Class<?>) rhs;
if (rhsClass.isArray()) {
return isAssignable(lhsComponentType, rhsClass.getComponentType());
}
} else if (rhs instanceof GenericArrayType) {
return isAssignable(lhsComponentType, ((GenericArrayType) rhs).getGenericComponentType());
}
}
// wildcard type on left
if (lhs instanceof WildcardType) {
return isWildcardAssignable((WildcardType) lhs, rhs);
}
// strange...
return false;
}
private static boolean isParameterizedAssignable(final ParameterizedType lhs, final ParameterizedType rhs) {
if (lhs.equals(rhs)) {
// that was easy
return true;
}
final Type[] lhsTypeArguments = lhs.getActualTypeArguments();
final Type[] rhsTypeArguments = rhs.getActualTypeArguments();
final int size = lhsTypeArguments.length;
if (rhsTypeArguments.length != size) {
// clearly incompatible types
return false;
}
for (int i = 0; i < size; i++) {
// verify all type arguments are assignable
final Type lhsArgument = lhsTypeArguments[i];
final Type rhsArgument = rhsTypeArguments[i];
if (!lhsArgument.equals(rhsArgument)
&& !(lhsArgument instanceof WildcardType
&& isWildcardAssignable((WildcardType) lhsArgument, rhsArgument))) {
return false;
}
}
return true;
}
private static boolean isWildcardAssignable(final WildcardType lhs, final Type rhs) {
final Type[] lhsUpperBounds = getEffectiveUpperBounds(lhs);
final Type[] lhsLowerBounds = getEffectiveLowerBounds(lhs);
if (rhs instanceof WildcardType) {
// oh boy, this scenario requires checking a lot of assignability!
final WildcardType rhsType = (WildcardType) rhs;
final Type[] rhsUpperBounds = getEffectiveUpperBounds(rhsType);
final Type[] rhsLowerBounds = getEffectiveLowerBounds(rhsType);
for (final Type lhsUpperBound : lhsUpperBounds) {
for (final Type rhsUpperBound : rhsUpperBounds) {
if (!isBoundAssignable(lhsUpperBound, rhsUpperBound)) {
return false;
}
}
for (final Type rhsLowerBound : rhsLowerBounds) {
if (!isBoundAssignable(lhsUpperBound, rhsLowerBound)) {
return false;
}
}
}
for (final Type lhsLowerBound : lhsLowerBounds) {
for (final Type rhsUpperBound : rhsUpperBounds) {
if (!isBoundAssignable(rhsUpperBound, lhsLowerBound)) {
return false;
}
}
for (final Type rhsLowerBound : rhsLowerBounds) {
if (!isBoundAssignable(rhsLowerBound, lhsLowerBound)) {
return false;
}
}
}
} else {
// phew, far less bounds to check
for (final Type lhsUpperBound : lhsUpperBounds) {
if (!isBoundAssignable(lhsUpperBound, rhs)) {
return false;
}
}
for (final Type lhsLowerBound : lhsLowerBounds) {
if (!isBoundAssignable(lhsLowerBound, rhs)) {
return false;
}
}
}
return true;
}
private static Type[] getEffectiveUpperBounds(final WildcardType type) {
final Type[] upperBounds = type.getUpperBounds();
return upperBounds.length == 0 ? new Type[] {Object.class} : upperBounds;
}
private static Type[] getEffectiveLowerBounds(final WildcardType type) {
final Type[] lowerBounds = type.getLowerBounds();
return lowerBounds.length == 0 ? new Type[] {null} : lowerBounds;
}
private static boolean isBoundAssignable(final Type lhs, final Type rhs) {
return (rhs == null) || ((lhs != null) && isAssignable(lhs, rhs));
}
}
| to |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/IgniteComputeEndpointBuilderFactory.java | {
"start": 12062,
"end": 14174
} | interface ____ {
/**
* Ignite Compute (camel-ignite)
* Run compute operations on an Ignite cluster.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* @return the dsl builder for the headers' name.
*/
default IgniteComputeHeaderNameBuilder igniteCompute() {
return IgniteComputeHeaderNameBuilder.INSTANCE;
}
/**
* Ignite Compute (camel-ignite)
* Run compute operations on an Ignite cluster.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* Syntax: <code>ignite-compute:endpointId</code>
*
* Path parameter: endpointId (required)
* The endpoint ID (not used).
*
* @param path endpointId
* @return the dsl builder
*/
default IgniteComputeEndpointBuilder igniteCompute(String path) {
return IgniteComputeEndpointBuilderFactory.endpointBuilder("ignite-compute", path);
}
/**
* Ignite Compute (camel-ignite)
* Run compute operations on an Ignite cluster.
*
* Category: cache,clustering
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-ignite
*
* Syntax: <code>ignite-compute:endpointId</code>
*
* Path parameter: endpointId (required)
* The endpoint ID (not used).
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path endpointId
* @return the dsl builder
*/
default IgniteComputeEndpointBuilder igniteCompute(String componentName, String path) {
return IgniteComputeEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Ignite Compute component.
*/
public static | IgniteComputeBuilders |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/InvalidPathException.java | {
"start": 1199,
"end": 1858
} | class ____ extends HadoopIllegalArgumentException {
private static final long serialVersionUID = 1L;
/**
* Constructs exception with the specified detail message.
*
* @param path invalid path.
*/
public InvalidPathException(final String path) {
super("Invalid path name " + path);
}
/**
* Constructs exception with the specified detail message.
*
* @param path invalid path.
* @param reason Reason <code>path</code> is invalid
*/
public InvalidPathException(final String path, final String reason) {
super("Invalid path " + path
+ (reason == null ? "" : ". (" + reason + ")"));
}
}
| InvalidPathException |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/CharacterTypeTest.java | {
"start": 1641,
"end": 2098
} | class ____ {
@Id
private Integer id;
private Character characterData;
public TestEntity() {
}
public TestEntity(Integer id, Character characterData) {
this.id = id;
this.characterData = characterData;
}
public Integer getId() {
return id;
}
public void setCharacterData(Character characterData) {
this.characterData = characterData;
}
public Character getCharacterData() {
return characterData;
}
}
}
| TestEntity |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleIsEmptyTest.java | {
"start": 882,
"end": 1506
} | class ____ extends TestCase {
public void test_isEmpty() throws Exception {
String sql = "SELECT product_id, TO_CHAR(ad_finaltext) FROM print_media WHERE ad_textdocs_ntab IS NOT EMPTY;";
String expect = "SELECT product_id, TO_CHAR(ad_finaltext)\n" + "FROM print_media\n"
+ "WHERE ad_textdocs_ntab IS NOT EMPTY;";
OracleStatementParser parser = new OracleStatementParser(sql);
SQLSelectStatement stmt = (SQLSelectStatement) parser.parseStatementList().get(0);
String text = TestUtils.outputOracle(stmt);
assertEquals(expect, text);
}
}
| OracleIsEmptyTest |
java | apache__flink | flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java | {
"start": 72254,
"end": 77105
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(ApplicationSubmissionContextReflector.class);
private static final ApplicationSubmissionContextReflector instance =
new ApplicationSubmissionContextReflector(ApplicationSubmissionContext.class);
public static ApplicationSubmissionContextReflector getInstance() {
return instance;
}
private static final String APPLICATION_TAGS_METHOD_NAME = "setApplicationTags";
private static final String ATTEMPT_FAILURES_METHOD_NAME =
"setAttemptFailuresValidityInterval";
private static final String KEEP_CONTAINERS_METHOD_NAME =
"setKeepContainersAcrossApplicationAttempts";
private static final String NODE_LABEL_EXPRESSION_NAME = "setNodeLabelExpression";
private final Method applicationTagsMethod;
private final Method attemptFailuresValidityIntervalMethod;
private final Method keepContainersMethod;
@Nullable private final Method nodeLabelExpressionMethod;
private ApplicationSubmissionContextReflector(Class<ApplicationSubmissionContext> clazz) {
// this method is only supported by Hadoop 2.4.0 onwards
this.applicationTagsMethod = getMethod(clazz, APPLICATION_TAGS_METHOD_NAME, Set.class);
// this method is only supported by Hadoop 2.6.0 onwards
this.attemptFailuresValidityIntervalMethod =
getMethod(clazz, ATTEMPT_FAILURES_METHOD_NAME, long.class);
// this method is only supported by Hadoop 2.4.0 onwards
this.keepContainersMethod =
getMethod(clazz, KEEP_CONTAINERS_METHOD_NAME, boolean.class);
this.nodeLabelExpressionMethod =
getMethod(clazz, NODE_LABEL_EXPRESSION_NAME, String.class);
}
private Method getMethod(Class<?> clazz, String methodName, Class<?>... paramTypes) {
try {
Method method = clazz.getMethod(methodName, paramTypes);
LOG.debug("{} supports method {}.", clazz.getCanonicalName(), methodName);
return method;
} catch (NoSuchMethodException e) {
LOG.debug("{} does not support method {}.", clazz.getCanonicalName(), methodName);
// assign null because the Hadoop version apparently does not support this call.
return null;
}
}
private void invokeMethod(
Method method,
String methodName,
ApplicationSubmissionContext context,
Object... args)
throws InvocationTargetException, IllegalAccessException {
if (method != null) {
LOG.debug(
"Calling method {} of {}.",
methodName,
context.getClass().getCanonicalName());
method.invoke(context, args);
} else {
LOG.debug(
"{} does not support method {}. Doing nothing.",
context.getClass().getCanonicalName(),
methodName);
}
}
public void setApplicationTags(
ApplicationSubmissionContext appContext, Set<String> applicationTags)
throws InvocationTargetException, IllegalAccessException {
invokeMethod(
applicationTagsMethod,
APPLICATION_TAGS_METHOD_NAME,
appContext,
applicationTags);
}
public void setApplicationNodeLabel(
ApplicationSubmissionContext appContext, String nodeLabel)
throws InvocationTargetException, IllegalAccessException {
invokeMethod(
nodeLabelExpressionMethod, NODE_LABEL_EXPRESSION_NAME, appContext, nodeLabel);
}
public void setAttemptFailuresValidityInterval(
ApplicationSubmissionContext appContext, long validityInterval)
throws InvocationTargetException, IllegalAccessException {
invokeMethod(
attemptFailuresValidityIntervalMethod,
ATTEMPT_FAILURES_METHOD_NAME,
appContext,
validityInterval);
}
public void setKeepContainersAcrossApplicationAttempts(
ApplicationSubmissionContext appContext, boolean keepContainers)
throws InvocationTargetException, IllegalAccessException {
invokeMethod(
keepContainersMethod, KEEP_CONTAINERS_METHOD_NAME, appContext, keepContainers);
}
}
private static | ApplicationSubmissionContextReflector |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/TestNodeAttributesManager.java | {
"start": 1939,
"end": 15035
} | class ____ {
private NodeAttributesManager attributesManager;
private final static String[] PREFIXES =
new String[] {"yarn.test1.io", "yarn.test2.io", "yarn.test3.io"};
private final static String[] HOSTNAMES =
new String[] {"host1", "host2", "host3"};
@BeforeEach
public void init() throws IOException {
Configuration conf = new Configuration();
attributesManager = new NodeAttributesManagerImpl();
conf.setClass(YarnConfiguration.FS_NODE_ATTRIBUTE_STORE_IMPL_CLASS,
FileSystemNodeAttributeStore.class, NodeAttributeStore.class);
conf = NodeAttributeTestUtils.getRandomDirConf(conf);
attributesManager.init(conf);
attributesManager.start();
}
@AfterEach
public void cleanUp() {
if (attributesManager != null) {
attributesManager.stop();
}
}
private Set<NodeAttribute> createAttributesForTest(String attributePrefix,
int numOfAttributes, String attributeNamePrefix,
String attributeValuePrefix) {
Set<NodeAttribute> attributes = new HashSet<>();
for (int i = 0; i< numOfAttributes; i++) {
NodeAttribute attribute = NodeAttribute.newInstance(
attributePrefix, attributeNamePrefix + "_" + i,
NodeAttributeType.STRING, attributeValuePrefix + "_" + i);
attributes.add(attribute);
}
return attributes;
}
private boolean sameAttributeSet(Set<NodeAttribute> set1,
Set<NodeAttribute> set2) {
return Sets.difference(set1, set2).isEmpty();
}
@Test
public void testAddNodeAttributes() throws IOException {
Map<String, Set<NodeAttribute>> toAddAttributes = new HashMap<>();
Map<NodeAttribute, AttributeValue> nodeAttributes;
// Add 3 attributes to host1
// yarn.test1.io/A1=host1_v1_1
// yarn.test1.io/A2=host1_v1_2
// yarn.test1.io/A3=host1_v1_3
toAddAttributes.put(HOSTNAMES[0],
createAttributesForTest(PREFIXES[0], 3, "A", "host1_v1"));
attributesManager.addNodeAttributes(toAddAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(3, nodeAttributes.size());
assertTrue(sameAttributeSet(toAddAttributes.get(HOSTNAMES[0]),
nodeAttributes.keySet()));
// Add 2 attributes to host2
// yarn.test1.io/A1=host2_v1_1
// yarn.test1.io/A2=host2_v1_2
toAddAttributes.clear();
toAddAttributes.put(HOSTNAMES[1],
createAttributesForTest(PREFIXES[0], 2, "A", "host2_v1"));
attributesManager.addNodeAttributes(toAddAttributes);
// Verify host1 attributes are still valid.
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(3, nodeAttributes.size());
// Verify new added host2 attributes are correctly updated.
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[1]);
assertEquals(2, nodeAttributes.size());
assertTrue(sameAttributeSet(toAddAttributes.get(HOSTNAMES[1]),
nodeAttributes.keySet()));
// Cluster wide, it only has 3 attributes.
// yarn.test1.io/A1
// yarn.test1.io/A2
// yarn.test1.io/A3
Set<NodeAttribute> clusterAttributes = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(PREFIXES[0]));
assertEquals(3, clusterAttributes.size());
// Query for attributes under a non-exist prefix,
// ensure it returns an empty set.
clusterAttributes = attributesManager
.getClusterNodeAttributes(Sets.newHashSet("non_exist_prefix"));
assertEquals(0, clusterAttributes.size());
// Not provide any prefix, ensure it returns all attributes.
clusterAttributes = attributesManager.getClusterNodeAttributes(null);
assertEquals(3, clusterAttributes.size());
// Add some other attributes with different prefixes on host1 and host2.
toAddAttributes.clear();
// Host1
// yarn.test2.io/A_1=host1_v2_1
// ...
// yarn.test2.io/A_10=host1_v2_10
toAddAttributes.put(HOSTNAMES[0],
createAttributesForTest(PREFIXES[1], 10, "C", "host1_v2"));
// Host2
// yarn.test2.io/C_1=host1_v2_1
// ...
// yarn.test2.io/C_20=host1_v2_20
toAddAttributes.put(HOSTNAMES[1],
createAttributesForTest(PREFIXES[1], 20, "C", "host1_v2"));
attributesManager.addNodeAttributes(toAddAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(13, nodeAttributes.size());
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[1]);
assertEquals(22, nodeAttributes.size());
}
@Test
public void testRemoveNodeAttributes() throws IOException {
Map<String, Set<NodeAttribute>> toAddAttributes = new HashMap<>();
Map<String, Set<NodeAttribute>> toRemoveAttributes = new HashMap<>();
Set<NodeAttribute> allAttributesPerPrefix = new HashSet<>();
Map<NodeAttribute, AttributeValue> nodeAttributes;
// Host1 -----------------------
// yarn.test1.io
// A1=host1_v1_1
// A2=host1_v1_2
// A3=host1_v1_3
// yarn.test2.io
// B1=host1_v2_1
// ...
// B5=host5_v2_5
// Host2 -----------------------
// yarn.test1.io
// A1=host2_v1_1
// A2=host2_v1_2
// yarn.test3.io
// C1=host2_v3_1
// c2=host2_v3_2
Set<NodeAttribute> host1set = new HashSet<>();
Set<NodeAttribute> host1set1 =
createAttributesForTest(PREFIXES[0], 3, "A", "host1_v1");
Set<NodeAttribute> host1set2 =
createAttributesForTest(PREFIXES[1], 5, "B", "host1_v1");
host1set.addAll(host1set1);
host1set.addAll(host1set2);
Set<NodeAttribute> host2set = new HashSet<>();
Set<NodeAttribute> host2set1 =
createAttributesForTest(PREFIXES[0], 2, "A", "host2_v1");
Set<NodeAttribute> host2set2 =
createAttributesForTest(PREFIXES[2], 2, "C", "host2_v3");
host2set.addAll(host2set1);
host2set.addAll(host2set2);
toAddAttributes.put(HOSTNAMES[0], host1set);
toAddAttributes.put(HOSTNAMES[1], host2set);
attributesManager.addNodeAttributes(toAddAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(8, nodeAttributes.size());
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[1]);
assertEquals(4, nodeAttributes.size());
allAttributesPerPrefix = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(PREFIXES[0]));
assertEquals(3, allAttributesPerPrefix.size());
allAttributesPerPrefix = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(PREFIXES[1]));
assertEquals(5, allAttributesPerPrefix.size());
allAttributesPerPrefix = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(PREFIXES[2]));
assertEquals(2, allAttributesPerPrefix.size());
// Remove "yarn.test1.io/A_2" from host1
Set<NodeAttribute> attributes2rm1 = new HashSet<>();
attributes2rm1.add(NodeAttribute.newInstance(PREFIXES[0], "A_2",
NodeAttributeType.STRING, "anyValue"));
toRemoveAttributes.put(HOSTNAMES[0], attributes2rm1);
attributesManager.removeNodeAttributes(toRemoveAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(7, nodeAttributes.size());
// Remove again, but give a non-exist attribute name
attributes2rm1.clear();
toRemoveAttributes.clear();
attributes2rm1.add(NodeAttribute.newInstance(PREFIXES[0], "non_exist_name",
NodeAttributeType.STRING, "anyValue"));
toRemoveAttributes.put(HOSTNAMES[0], attributes2rm1);
attributesManager.removeNodeAttributes(toRemoveAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(7, nodeAttributes.size());
// Remove "yarn.test1.io/A_2" from host2 too,
// by then there will be no such attribute exist in the cluster.
Set<NodeAttribute> attributes2rm2 = new HashSet<>();
attributes2rm2.add(NodeAttribute.newInstance(PREFIXES[0], "A_2",
NodeAttributeType.STRING, "anyValue"));
toRemoveAttributes.clear();
toRemoveAttributes.put(HOSTNAMES[1], attributes2rm2);
attributesManager.removeNodeAttributes(toRemoveAttributes);
// Make sure cluster wide attributes are still consistent.
// Since both host1 and host2 doesn't have "yarn.test1.io/A_2",
// get all attributes under prefix "yarn.test1.io" should only return
// us A_1 and A_3.
allAttributesPerPrefix = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(PREFIXES[0]));
assertEquals(2, allAttributesPerPrefix.size());
}
@Test
public void testReplaceNodeAttributes() throws IOException {
Map<String, Set<NodeAttribute>> toAddAttributes = new HashMap<>();
Map<String, Set<NodeAttribute>> toReplaceMap = new HashMap<>();
Map<NodeAttribute, AttributeValue> nodeAttributes;
Set<NodeAttribute> filteredAttributes;
Set<NodeAttribute> clusterAttributes;
// Add 3 attributes to host1
// yarn.test1.io/A1=host1_v1_1
// yarn.test1.io/A2=host1_v1_2
// yarn.test1.io/A3=host1_v1_3
toAddAttributes.put(HOSTNAMES[0],
createAttributesForTest(PREFIXES[0], 3, "A", "host1_v1"));
attributesManager.addNodeAttributes(toAddAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(3, nodeAttributes.size());
// Add 10 distributed node attributes to host1
// nn.yarn.io/dist-node-attribute1=dist_v1_1
// nn.yarn.io/dist-node-attribute2=dist_v1_2
// ...
// nn.yarn.io/dist-node-attribute10=dist_v1_10
toAddAttributes.clear();
toAddAttributes.put(HOSTNAMES[0],
createAttributesForTest(NodeAttribute.PREFIX_DISTRIBUTED,
10, "dist-node-attribute", "dist_v1"));
attributesManager.addNodeAttributes(toAddAttributes);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(13, nodeAttributes.size());
clusterAttributes = attributesManager.getClusterNodeAttributes(
Sets.newHashSet(NodeAttribute.PREFIX_DISTRIBUTED, PREFIXES[0]));
assertEquals(13, clusterAttributes.size());
// Replace by prefix
// Same distributed attributes names, but different values.
Set<NodeAttribute> toReplaceAttributes =
createAttributesForTest(NodeAttribute.PREFIX_DISTRIBUTED, 5,
"dist-node-attribute", "dist_v2");
attributesManager.replaceNodeAttributes(NodeAttribute.PREFIX_DISTRIBUTED,
ImmutableMap.of(HOSTNAMES[0], toReplaceAttributes));
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(8, nodeAttributes.size());
clusterAttributes = attributesManager.getClusterNodeAttributes(
Sets.newHashSet(NodeAttribute.PREFIX_DISTRIBUTED, PREFIXES[0]));
assertEquals(8, clusterAttributes.size());
// Now we have 5 distributed attributes
filteredAttributes = NodeLabelUtil.filterAttributesByPrefix(
nodeAttributes.keySet(), NodeAttribute.PREFIX_DISTRIBUTED);
assertEquals(5, filteredAttributes.size());
// Values are updated to have prefix dist_v2
assertTrue(filteredAttributes.stream().allMatch(
nodeAttribute ->
nodeAttribute.getAttributeValue().startsWith("dist_v2")));
// We still have 3 yarn.test1.io attributes
filteredAttributes = NodeLabelUtil.filterAttributesByPrefix(
nodeAttributes.keySet(), PREFIXES[0]);
assertEquals(3, filteredAttributes.size());
// Replace with prefix
// Different attribute names
toReplaceAttributes =
createAttributesForTest(NodeAttribute.PREFIX_DISTRIBUTED, 1,
"dist-node-attribute-v2", "dist_v3");
attributesManager.replaceNodeAttributes(NodeAttribute.PREFIX_DISTRIBUTED,
ImmutableMap.of(HOSTNAMES[0], toReplaceAttributes));
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(4, nodeAttributes.size());
clusterAttributes = attributesManager.getClusterNodeAttributes(
Sets.newHashSet(NodeAttribute.PREFIX_DISTRIBUTED));
assertEquals(1, clusterAttributes.size());
NodeAttribute attr = clusterAttributes.iterator().next();
assertEquals("dist-node-attribute-v2_0",
attr.getAttributeKey().getAttributeName());
assertEquals(NodeAttribute.PREFIX_DISTRIBUTED,
attr.getAttributeKey().getAttributePrefix());
assertEquals("dist_v3_0", attr.getAttributeValue());
// Replace all attributes
toReplaceMap.put(HOSTNAMES[0],
createAttributesForTest(PREFIXES[1], 2, "B", "B_v1"));
attributesManager.replaceNodeAttributes(null, toReplaceMap);
nodeAttributes = attributesManager.getAttributesForNode(HOSTNAMES[0]);
assertEquals(2, nodeAttributes.size());
clusterAttributes = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(PREFIXES[1]));
assertEquals(2, clusterAttributes.size());
clusterAttributes = attributesManager
.getClusterNodeAttributes(Sets.newHashSet(
NodeAttribute.PREFIX_DISTRIBUTED));
assertEquals(0, clusterAttributes.size());
}
}
| TestNodeAttributesManager |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/internals/SerdeGetter.java | {
"start": 1139,
"end": 1764
} | class ____ {
private final Supplier<Serde<?>> keySerdeSupplier;
private final Supplier<Serde<?>> valueSerdeSupplier;
public SerdeGetter(final ProcessorContext<?, ?> context) {
keySerdeSupplier = context::keySerde;
valueSerdeSupplier = context::valueSerde;
}
public SerdeGetter(final StateStoreContext context) {
keySerdeSupplier = context::keySerde;
valueSerdeSupplier = context::valueSerde;
}
public Serde<?> keySerde() {
return keySerdeSupplier.get();
}
public Serde<?> valueSerde() {
return valueSerdeSupplier.get();
}
}
| SerdeGetter |
java | netty__netty | microbench/src/main/java/io/netty/handler/codec/http/QueryStringEncoderBenchmark.java | {
"start": 1155,
"end": 2819
} | class ____ extends AbstractMicrobenchmark {
private String shortAscii;
private String shortUtf8;
private String shortAsciiFirst;
private String longAscii;
private String longUtf8;
private String longAsciiFirst;
@Setup
public void setUp() {
// Avoid constant pool for strings since it's common for at least values to not be constant.
shortAscii = new String("foo".toCharArray());
shortUtf8 = new String("ほげほげ".toCharArray());
shortAsciiFirst = shortAscii + shortUtf8;
longAscii = repeat(shortAscii, 100);
longUtf8 = repeat(shortUtf8, 100);
longAsciiFirst = longAscii + longUtf8;
}
@Benchmark
public String shortAscii() {
return encode(shortAscii);
}
@Benchmark
public String shortUtf8() {
return encode(shortUtf8);
}
@Benchmark
public String shortAsciiFirst() {
return encode(shortAsciiFirst);
}
@Benchmark
public String longAscii() {
return encode(longAscii);
}
@Benchmark
public String longUtf8() {
return encode(longUtf8);
}
@Benchmark
public String longAsciiFirst() {
return encode(longAsciiFirst);
}
private static String encode(String s) {
QueryStringEncoder encoder = new QueryStringEncoder("");
encoder.addParam(s, s);
return encoder.toString();
}
private static String repeat(String s, int num) {
StringBuilder sb = new StringBuilder(num * s.length());
for (int i = 0; i < num; i++) {
sb.append(s);
}
return sb.toString();
}
}
| QueryStringEncoderBenchmark |
java | grpc__grpc-java | okhttp/third_party/okhttp/main/java/io/grpc/okhttp/internal/framed/Http2.java | {
"start": 15082,
"end": 23843
} | class ____ implements io.grpc.okhttp.internal.framed.FrameWriter {
private final BufferedSink sink;
private final boolean client;
private final Buffer hpackBuffer;
private final Hpack.Writer hpackWriter;
private int maxFrameSize;
private boolean closed;
Writer(BufferedSink sink, boolean client) {
this.sink = sink;
this.client = client;
this.hpackBuffer = new Buffer();
this.hpackWriter = new Hpack.Writer(hpackBuffer);
this.maxFrameSize = INITIAL_MAX_FRAME_SIZE;
}
@Override public synchronized void flush() throws IOException {
if (closed) throw new IOException("closed");
sink.flush();
}
@Override public synchronized void ackSettings(io.grpc.okhttp.internal.framed.Settings peerSettings) throws IOException {
if (closed) throw new IOException("closed");
this.maxFrameSize = peerSettings.getMaxFrameSize(maxFrameSize);
int length = 0;
byte type = TYPE_SETTINGS;
byte flags = FLAG_ACK;
int streamId = 0;
frameHeader(streamId, length, type, flags);
sink.flush();
}
@Override public synchronized void connectionPreface() throws IOException {
if (closed) throw new IOException("closed");
if (!client) return; // Nothing to write; servers don't send connection headers!
if (logger.isLoggable(FINE)) {
logger.fine(format(">> CONNECTION %s", CONNECTION_PREFACE.hex()));
}
sink.write(CONNECTION_PREFACE.toByteArray());
sink.flush();
}
@Override public synchronized void synStream(boolean outFinished, boolean inFinished,
int streamId, int associatedStreamId, List<Header> headerBlock)
throws IOException {
if (inFinished) throw new UnsupportedOperationException();
if (closed) throw new IOException("closed");
headers(outFinished, streamId, headerBlock);
}
@Override public synchronized void synReply(boolean outFinished, int streamId,
List<Header> headerBlock) throws IOException {
if (closed) throw new IOException("closed");
headers(outFinished, streamId, headerBlock);
}
@Override public synchronized void headers(int streamId, List<Header> headerBlock)
throws IOException {
if (closed) throw new IOException("closed");
headers(false, streamId, headerBlock);
}
@Override public synchronized void pushPromise(int streamId, int promisedStreamId,
List<Header> requestHeaders) throws IOException {
if (closed) throw new IOException("closed");
hpackWriter.writeHeaders(requestHeaders);
long byteCount = hpackBuffer.size();
int length = (int) Math.min(maxFrameSize - 4, byteCount);
byte type = TYPE_PUSH_PROMISE;
byte flags = byteCount == length ? FLAG_END_HEADERS : 0;
frameHeader(streamId, length + 4, type, flags);
sink.writeInt(promisedStreamId & 0x7fffffff);
sink.write(hpackBuffer, length);
if (byteCount > length) writeContinuationFrames(streamId, byteCount - length);
}
void headers(boolean outFinished, int streamId, List<Header> headerBlock) throws IOException {
if (closed) throw new IOException("closed");
hpackWriter.writeHeaders(headerBlock);
long byteCount = hpackBuffer.size();
int length = (int) Math.min(maxFrameSize, byteCount);
byte type = TYPE_HEADERS;
byte flags = byteCount == length ? FLAG_END_HEADERS : 0;
if (outFinished) flags |= FLAG_END_STREAM;
frameHeader(streamId, length, type, flags);
sink.write(hpackBuffer, length);
if (byteCount > length) writeContinuationFrames(streamId, byteCount - length);
}
private void writeContinuationFrames(int streamId, long byteCount) throws IOException {
while (byteCount > 0) {
int length = (int) Math.min(maxFrameSize, byteCount);
byteCount -= length;
frameHeader(streamId, length, TYPE_CONTINUATION, byteCount == 0 ? FLAG_END_HEADERS : 0);
sink.write(hpackBuffer, length);
}
}
@Override public synchronized void rstStream(int streamId, io.grpc.okhttp.internal.framed.ErrorCode errorCode)
throws IOException {
if (closed) throw new IOException("closed");
if (errorCode.httpCode == -1) throw new IllegalArgumentException();
int length = 4;
byte type = TYPE_RST_STREAM;
byte flags = FLAG_NONE;
frameHeader(streamId, length, type, flags);
sink.writeInt(errorCode.httpCode);
sink.flush();
}
@Override public int maxDataLength() {
return maxFrameSize;
}
@Override public synchronized void data(boolean outFinished, int streamId, Buffer source,
int byteCount) throws IOException {
if (closed) throw new IOException("closed");
byte flags = FLAG_NONE;
if (outFinished) flags |= FLAG_END_STREAM;
dataFrame(streamId, flags, source, byteCount);
}
void dataFrame(int streamId, byte flags, Buffer buffer, int byteCount) throws IOException {
byte type = TYPE_DATA;
frameHeader(streamId, byteCount, type, flags);
if (byteCount > 0) {
sink.write(buffer, byteCount);
}
}
@Override public synchronized void settings(io.grpc.okhttp.internal.framed.Settings settings) throws IOException {
if (closed) throw new IOException("closed");
int length = settings.size() * 6;
byte type = TYPE_SETTINGS;
byte flags = FLAG_NONE;
int streamId = 0;
frameHeader(streamId, length, type, flags);
for (int i = 0; i < io.grpc.okhttp.internal.framed.Settings.COUNT; i++) {
if (!settings.isSet(i)) continue;
int id = i;
if (id == 4) id = 3; // SETTINGS_MAX_CONCURRENT_STREAMS renumbered.
else if (id == 7) id = 4; // SETTINGS_INITIAL_WINDOW_SIZE renumbered.
sink.writeShort(id);
sink.writeInt(settings.get(i));
}
sink.flush();
}
@Override public synchronized void ping(boolean ack, int payload1, int payload2)
throws IOException {
if (closed) throw new IOException("closed");
int length = 8;
byte type = TYPE_PING;
byte flags = ack ? FLAG_ACK : FLAG_NONE;
int streamId = 0;
frameHeader(streamId, length, type, flags);
sink.writeInt(payload1);
sink.writeInt(payload2);
sink.flush();
}
@Override public synchronized void goAway(int lastGoodStreamId, io.grpc.okhttp.internal.framed.ErrorCode errorCode,
byte[] debugData) throws IOException {
if (closed) throw new IOException("closed");
if (errorCode.httpCode == -1) throw illegalArgument("errorCode.httpCode == -1");
int length = 8 + debugData.length;
byte type = TYPE_GOAWAY;
byte flags = FLAG_NONE;
int streamId = 0;
frameHeader(streamId, length, type, flags);
sink.writeInt(lastGoodStreamId);
sink.writeInt(errorCode.httpCode);
if (debugData.length > 0) {
sink.write(debugData);
}
sink.flush();
}
@Override public synchronized void windowUpdate(int streamId, long windowSizeIncrement)
throws IOException {
if (closed) throw new IOException("closed");
if (windowSizeIncrement == 0 || windowSizeIncrement > 0x7fffffffL) {
throw illegalArgument("windowSizeIncrement == 0 || windowSizeIncrement > 0x7fffffffL: %s",
windowSizeIncrement);
}
int length = 4;
byte type = TYPE_WINDOW_UPDATE;
byte flags = FLAG_NONE;
frameHeader(streamId, length, type, flags);
sink.writeInt((int) windowSizeIncrement);
sink.flush();
}
@Override public synchronized void close() throws IOException {
closed = true;
sink.close();
}
void frameHeader(int streamId, int length, byte type, byte flags) throws IOException {
if (logger.isLoggable(FINE)) logger.fine(formatHeader(false, streamId, length, type, flags));
if (length > maxFrameSize) {
throw illegalArgument("FRAME_SIZE_ERROR length > %d: %d", maxFrameSize, length);
}
if ((streamId & 0x80000000) != 0) throw illegalArgument("reserved bit set: %s", streamId);
writeMedium(sink, length);
sink.writeByte(type & 0xff);
sink.writeByte(flags & 0xff);
sink.writeInt(streamId & 0x7fffffff);
}
}
@FormatMethod
private static IllegalArgumentException illegalArgument(String message, Object... args) {
throw new IllegalArgumentException(format(Locale.US, message, args));
}
@FormatMethod
private static IOException ioException(String message, Object... args) throws IOException {
throw new IOException(format(Locale.US, message, args));
}
/**
* Decompression of the header block occurs above the framing layer. This
* | Writer |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLNumericLiteralExpr.java | {
"start": 774,
"end": 1158
} | class ____ extends SQLTypeExpr {
public SQLNumericLiteralExpr(SQLDataType sqlDataType) {
super(sqlDataType);
}
public abstract Number getNumber();
public abstract void setNumber(Number number);
public abstract SQLNumericLiteralExpr clone();
@Override
public List getChildren() {
return Collections.emptyList();
}
}
| SQLNumericLiteralExpr |
java | micronaut-projects__micronaut-core | http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/Http2ServerHandler.java | {
"start": 11778,
"end": 14594
} | class ____ extends AbstractHttp2ConnectionHandlerBuilder<ConnectionHandler, ConnectionHandlerBuilder> {
private final Http2ServerHandler frameListener;
private Http2AccessLogManager.Factory accessLogManagerFactory;
private Http2AccessLogManager accessLogManager;
public ConnectionHandlerBuilder(RequestHandler requestHandler) {
frameListener = new Http2ServerHandler(requestHandler);
}
@Override
public ConnectionHandlerBuilder frameLogger(Http2FrameLogger frameLogger) {
return super.frameLogger(frameLogger);
}
@Override
public ConnectionHandlerBuilder validateHeaders(boolean validateHeaders) {
return super.validateHeaders(validateHeaders);
}
@Override
public ConnectionHandlerBuilder initialSettings(Http2Settings settings) {
return super.initialSettings(settings);
}
public ConnectionHandlerBuilder accessLogManagerFactory(Http2AccessLogManager.@Nullable Factory accessLogManagerFactory) {
this.accessLogManagerFactory = accessLogManagerFactory;
return this;
}
public ConnectionHandlerBuilder compressor(HttpCompressionStrategy compressionStrategy) {
if (compressionStrategy.isEnabled()) {
frameListener.compressor(new Compressor(compressionStrategy));
}
return this;
}
public ConnectionHandlerBuilder bodySizeLimits(BodySizeLimits bodySizeLimits) {
frameListener.bodySizeLimits = bodySizeLimits;
return this;
}
@Override
public ConnectionHandler build() {
connection(new DefaultHttp2Connection(isServer(), maxReservedStreams()));
Http2FrameListener fl = new DelegatingDecompressorFrameListener(connection(), frameListener, false);
if (accessLogManagerFactory != null) {
accessLogManager = new Http2AccessLogManager(accessLogManagerFactory, connection());
fl = new Http2AccessLogFrameListener(fl, accessLogManager);
}
frameListener(fl);
return super.build();
}
@Override
protected ConnectionHandler build(Http2ConnectionDecoder decoder, Http2ConnectionEncoder encoder, Http2Settings initialSettings) throws Exception {
if (accessLogManager != null) {
encoder = new Http2AccessLogConnectionEncoder(encoder, accessLogManager);
}
ConnectionHandler ch = new ConnectionHandler(decoder, encoder, initialSettings, decoupleCloseAndGoAway(), flushPreface(), frameListener, accessLogManager);
frameListener.init(ch);
return ch;
}
}
private final | ConnectionHandlerBuilder |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/DomUtilsTest.java | {
"start": 1349,
"end": 32072
} | class ____ {
@Test
void testFindChildElement() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element buildElement = DomUtils.findChildElement(root, "build");
assertNotNull(buildElement, "Should find build element");
Element pluginsElement = DomUtils.findChildElement(buildElement, "plugins");
assertNotNull(pluginsElement, "Should find plugins element");
}
@Test
void testInsertNewElement() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
</plugin>
</plugins>
</build>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element buildElement = DomUtils.findChildElement(root, "build");
Element pluginManagementElement = DomUtils.insertNewElement("pluginManagement", buildElement);
assertNotNull(pluginManagementElement, "Should create pluginManagement element");
// Verify it was added to the document
String xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<pluginManagement>"), "Should contain pluginManagement element");
}
@Test
void testInsertContentElement() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element descriptionElement = DomUtils.insertContentElement(root, "description", "Test project description");
assertNotNull(descriptionElement, "Should create description element");
// Verify it was added to the document with content
String xmlOutput = DomUtils.toXml(doc);
assertTrue(
xmlOutput.contains("<description>Test project description</description>"),
"Should contain description element with content");
}
@Test
void testToXml() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
String xmlOutput = DomUtils.toXml(doc);
assertNotNull(xmlOutput, "Should produce XML output");
assertTrue(xmlOutput.contains("<project"), "Should contain project element");
assertTrue(xmlOutput.contains("<modelVersion>4.0.0</modelVersion>"), "Should contain modelVersion");
assertTrue(xmlOutput.contains("<groupId>test</groupId>"), "Should contain groupId");
}
@Test
void testElementOrderingInProject() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert elements that should be ordered according to ELEMENT_ORDER
DomUtils.insertContentElement(root, "description", "Test description");
DomUtils.insertContentElement(root, "name", "Test Project");
DomUtils.insertNewElement("properties", root);
String xmlOutput = DomUtils.toXml(doc);
// Verify that elements appear in the correct order according to ELEMENT_ORDER
int nameIndex = xmlOutput.indexOf("<name>");
int descriptionIndex = xmlOutput.indexOf("<description>");
int propertiesIndex = xmlOutput.indexOf("<properties>");
assertTrue(nameIndex > 0, "Should contain name element");
assertTrue(descriptionIndex > 0, "Should contain description element");
assertTrue(propertiesIndex > 0, "Should contain properties element");
// According to ELEMENT_ORDER: name should come before description, and properties should come much later
assertTrue(nameIndex < descriptionIndex, "name should come before description");
assertTrue(descriptionIndex < propertiesIndex, "description should come before properties");
}
@Test
void testInsertElementWithCorrectPositioning() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<properties>
<maven.compiler.source>17</maven.compiler.source>
</properties>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert elements that should be positioned according to ELEMENT_ORDER
DomUtils.insertContentElement(root, "name", "Test Project");
DomUtils.insertContentElement(root, "description", "Test description");
DomUtils.insertContentElement(root, "url", "https://example.com");
String xmlOutput = DomUtils.toXml(doc);
// Find positions of all elements
int modelVersionIndex = xmlOutput.indexOf("<modelVersion>");
int groupIdIndex = xmlOutput.indexOf("<groupId>");
int nameIndex = xmlOutput.indexOf("<name>");
int descriptionIndex = xmlOutput.indexOf("<description>");
int urlIndex = xmlOutput.indexOf("<url>");
int propertiesIndex = xmlOutput.indexOf("<properties>");
// Verify correct ordering according to ELEMENT_ORDER for project
assertTrue(modelVersionIndex < groupIdIndex, "modelVersion should come before groupId");
assertTrue(groupIdIndex < nameIndex, "groupId should come before name");
assertTrue(nameIndex < descriptionIndex, "name should come before description");
assertTrue(descriptionIndex < urlIndex, "description should come before url");
assertTrue(urlIndex < propertiesIndex, "url should come before properties");
}
@Test
void testInsertElementBetweenExistingElements() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<name>Test Project</name>
<url>https://example.com</url>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert description between name and url
DomUtils.insertContentElement(root, "description", "Test description");
String xmlOutput = DomUtils.toXml(doc);
int nameIndex = xmlOutput.indexOf("<name>");
int descriptionIndex = xmlOutput.indexOf("<description>");
int urlIndex = xmlOutput.indexOf("<url>");
// Verify description is inserted between name and url
assertTrue(nameIndex < descriptionIndex, "name should come before description");
assertTrue(descriptionIndex < urlIndex, "description should come before url");
}
@Test
void testInsertElementNotInOrdering() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert an element that's not in the ELEMENT_ORDER (should be appended at the end)
DomUtils.insertContentElement(root, "customElement", "custom value");
String xmlOutput = DomUtils.toXml(doc);
int versionIndex = xmlOutput.indexOf("<version>");
int customElementIndex = xmlOutput.indexOf("<customElement>");
// Custom element should be appended at the end
assertTrue(customElementIndex > versionIndex, "customElement should come after version");
assertTrue(
xmlOutput.contains("<customElement>custom value</customElement>"),
"Should contain custom element with content");
}
@Test
void testInsertElementInParentWithoutOrdering() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<customParent>
<existingChild>value</existingChild>
</customParent>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element customParent = root.child("customParent").orElse(null);
assertNotNull(customParent, "customParent should exist");
// Insert element in parent that has no ordering defined
DomUtils.insertContentElement(customParent, "newChild", "new value");
String xmlOutput = DomUtils.toXml(doc);
// Should be appended at the end since no ordering is defined for customParent
assertTrue(xmlOutput.contains("<newChild>new value</newChild>"), "Should contain new child element");
int existingChildIndex = xmlOutput.indexOf("<existingChild>");
int newChildIndex = xmlOutput.indexOf("<newChild>");
assertTrue(newChildIndex > existingChildIndex, "newChild should come after existingChild");
}
@Test
void testInsertElementInDependency() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>5.9.0</version>
</dependency>
</dependencies>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element dependencies = root.child("dependencies").orElse(null);
assertNotNull(dependencies, "dependencies should exist");
Element dependency = dependencies.child("dependency").orElse(null);
assertNotNull(dependency, "dependency should exist");
// Insert elements in dependency according to dependency ordering
DomUtils.insertContentElement(dependency, "scope", "test");
DomUtils.insertContentElement(dependency, "type", "jar");
String xmlOutput = DomUtils.toXml(doc);
// Verify dependency element ordering: groupId, artifactId, version, type, scope
int groupIdIndex = xmlOutput.indexOf("<groupId>org.junit.jupiter</groupId>");
int artifactIdIndex = xmlOutput.indexOf("<artifactId>junit-jupiter</artifactId>");
int versionIndex = xmlOutput.indexOf("<version>5.9.0</version>");
int typeIndex = xmlOutput.indexOf("<type>jar</type>");
int scopeIndex = xmlOutput.indexOf("<scope>test</scope>");
assertTrue(groupIdIndex < artifactIdIndex, "groupId should come before artifactId");
assertTrue(artifactIdIndex < versionIndex, "artifactId should come before version");
assertTrue(versionIndex < typeIndex, "version should come before type");
assertTrue(typeIndex < scopeIndex, "type should come before scope");
}
@Test
void testInsertElementInBuild() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<build>
<finalName>test-app</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element build = root.child("build").orElse(null);
assertNotNull(build, "build should exist");
// Insert elements in build according to build ordering
DomUtils.insertContentElement(build, "directory", "target");
DomUtils.insertContentElement(build, "sourceDirectory", "src/main/java");
String xmlOutput = DomUtils.toXml(doc);
// Verify build element ordering: directory, finalName, sourceDirectory, plugins
int directoryIndex = xmlOutput.indexOf("<directory>target</directory>");
int finalNameIndex = xmlOutput.indexOf("<finalName>test-app</finalName>");
int sourceDirectoryIndex = xmlOutput.indexOf("<sourceDirectory>src/main/java</sourceDirectory>");
int pluginsIndex = xmlOutput.indexOf("<plugins>");
assertTrue(directoryIndex < finalNameIndex, "directory should come before finalName");
assertTrue(finalNameIndex < sourceDirectoryIndex, "finalName should come before sourceDirectory");
assertTrue(sourceDirectoryIndex < pluginsIndex, "sourceDirectory should come before plugins");
}
@Test
void testInsertElementWithTextContent() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert elements with various text content scenarios
DomUtils.insertContentElement(root, "name", "Test Project Name");
DomUtils.insertContentElement(root, "description", ""); // Empty content
DomUtils.insertContentElement(root, "url", null); // Null content
DomUtils.insertContentElement(root, "inceptionYear", "2023");
String xmlOutput = DomUtils.toXml(doc);
// Verify text content handling
assertTrue(xmlOutput.contains("<name>Test Project Name</name>"), "Should contain name with text content");
assertTrue(
xmlOutput.contains("<description></description>") || xmlOutput.contains("<description/>"),
"Should contain empty description element");
assertTrue(
xmlOutput.contains("<url></url>") || xmlOutput.contains("<url/>"), "Should contain empty url element");
assertTrue(
xmlOutput.contains("<inceptionYear>2023</inceptionYear>"),
"Should contain inceptionYear with text content");
}
@Test
void testInsertNewElementWithoutContent() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert empty elements using insertNewElement
Element properties = DomUtils.insertNewElement("properties", root);
Element dependencies = DomUtils.insertNewElement("dependencies", root);
assertNotNull(properties, "properties element should be created");
assertNotNull(dependencies, "dependencies element should be created");
String xmlOutput = DomUtils.toXml(doc);
// Verify elements are created and positioned correctly
int versionIndex = xmlOutput.indexOf("<version>1.0.0</version>");
int propertiesIndex = xmlOutput.indexOf("<properties>");
int dependenciesIndex = xmlOutput.indexOf("<dependencies>");
assertTrue(versionIndex < propertiesIndex, "version should come before properties");
assertTrue(propertiesIndex < dependenciesIndex, "properties should come before dependencies");
// Verify elements are empty
assertTrue(
xmlOutput.contains("<properties></properties>") || xmlOutput.contains("<properties/>"),
"properties should be empty");
assertTrue(
xmlOutput.contains("<dependencies></dependencies>") || xmlOutput.contains("<dependencies/>"),
"dependencies should be empty");
}
@Test
void testInsertMultipleElementsInCorrectOrder() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Insert elements in random order - they should be positioned correctly
DomUtils.insertContentElement(root, "licenses", "");
DomUtils.insertContentElement(root, "name", "Test Project");
DomUtils.insertNewElement("dependencies", root);
DomUtils.insertContentElement(root, "description", "A test project");
DomUtils.insertNewElement("properties", root);
DomUtils.insertContentElement(root, "url", "https://example.com");
DomUtils.insertNewElement("build", root);
String xmlOutput = DomUtils.toXml(doc);
// Find all element positions
int modelVersionIndex = xmlOutput.indexOf("<modelVersion>");
int groupIdIndex = xmlOutput.indexOf("<groupId>");
int nameIndex = xmlOutput.indexOf("<name>");
int descriptionIndex = xmlOutput.indexOf("<description>");
int urlIndex = xmlOutput.indexOf("<url>");
int licensesIndex = xmlOutput.indexOf("<licenses>");
int propertiesIndex = xmlOutput.indexOf("<properties>");
int dependenciesIndex = xmlOutput.indexOf("<dependencies>");
int buildIndex = xmlOutput.indexOf("<build>");
// Verify correct ordering according to ELEMENT_ORDER
assertTrue(modelVersionIndex < groupIdIndex, "modelVersion should come before groupId");
assertTrue(groupIdIndex < nameIndex, "groupId should come before name");
assertTrue(nameIndex < descriptionIndex, "name should come before description");
assertTrue(descriptionIndex < urlIndex, "description should come before url");
assertTrue(urlIndex < licensesIndex, "url should come before licenses");
assertTrue(licensesIndex < propertiesIndex, "licenses should come before properties");
assertTrue(propertiesIndex < dependenciesIndex, "properties should come before dependencies");
assertTrue(dependenciesIndex < buildIndex, "dependencies should come before build");
}
@Test
void testRemoveElement() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<name>Test Project</name>
<description>Test description</description>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element nameElement = DomUtils.findChildElement(root, "name");
// Test removing existing element
DomUtils.removeElement(nameElement);
String xmlOutput = DomUtils.toXml(doc);
assertFalse(xmlOutput.contains("<name>Test Project</name>"), "Should not contain removed name element");
assertTrue(
xmlOutput.contains("<description>Test description</description>"), "Should still contain description");
}
@Test
void testChildTextContent() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<name>Test Project</name>
<description></description>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Test getting text from existing elements
assertEquals("4.0.0", root.childText("modelVersion"), "Should get modelVersion text");
assertEquals("test", root.childText("groupId"), "Should get groupId text");
assertEquals("Test Project", root.childText("name"), "Should get name text");
assertEquals("", root.childText("description"), "Should get empty description text");
// Test getting text from non-existing element
assertNull(root.childText("nonexistent"), "Should return null for non-existing element");
}
@Test
void testAddGAVElements() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<dependencies>
<dependency>
</dependency>
</dependencies>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element dependencies = DomUtils.findChildElement(root, "dependencies");
Element dependency = DomUtils.findChildElement(dependencies, "dependency");
// Test adding GAV elements with version
DomUtils.addGAVElements(dependency, "org.example", "test-artifact", "1.0.0");
String xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<groupId>org.example</groupId>"), "Should contain groupId");
assertTrue(xmlOutput.contains("<artifactId>test-artifact</artifactId>"), "Should contain artifactId");
assertTrue(xmlOutput.contains("<version>1.0.0</version>"), "Should contain version");
// Test adding GAV elements without version
Element dependency2 = DomUtils.insertNewElement("dependency", dependencies);
DomUtils.addGAVElements(dependency2, "org.example", "test-artifact2", null);
xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<artifactId>test-artifact2</artifactId>"), "Should contain second artifactId");
assertFalse(
xmlOutput.contains("test-artifact2</artifactId>\n <version>"),
"Should not add version element for null version");
}
@Test
void testCreateDependency() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<dependencies>
</dependencies>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element dependencies = DomUtils.findChildElement(root, "dependencies");
// Test creating dependency with version
Element dependency = DomUtils.createDependency(dependencies, "org.junit.jupiter", "junit-jupiter", "5.9.0");
assertNotNull(dependency, "Should create dependency element");
String xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<dependency>"), "Should contain dependency element");
assertTrue(xmlOutput.contains("<groupId>org.junit.jupiter</groupId>"), "Should contain groupId");
assertTrue(xmlOutput.contains("<artifactId>junit-jupiter</artifactId>"), "Should contain artifactId");
assertTrue(xmlOutput.contains("<version>5.9.0</version>"), "Should contain version");
// Test creating dependency without version
Element dependency2 = DomUtils.createDependency(dependencies, "org.example", "test-lib", null);
assertNotNull(dependency2, "Should create second dependency element");
xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<artifactId>test-lib</artifactId>"), "Should contain second artifactId");
}
@Test
void testCreatePlugin() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<build>
<plugins>
</plugins>
</build>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
Element build = DomUtils.findChildElement(root, "build");
Element plugins = DomUtils.findChildElement(build, "plugins");
// Test creating plugin with version
Element plugin = DomUtils.createPlugin(plugins, "org.apache.maven.plugins", "maven-compiler-plugin", "3.11.0");
assertNotNull(plugin, "Should create plugin element");
String xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<plugin>"), "Should contain plugin element");
assertTrue(xmlOutput.contains("<groupId>org.apache.maven.plugins</groupId>"), "Should contain groupId");
assertTrue(xmlOutput.contains("<artifactId>maven-compiler-plugin</artifactId>"), "Should contain artifactId");
assertTrue(xmlOutput.contains("<version>3.11.0</version>"), "Should contain version");
// Test creating plugin without version
Element plugin2 = DomUtils.createPlugin(plugins, "org.example", "test-plugin", "");
assertNotNull(plugin2, "Should create second plugin element");
xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<artifactId>test-plugin</artifactId>"), "Should contain second artifactId");
}
@Test
void testUpdateOrCreateChildElement() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
<name>Old Name</name>
</project>
""";
Document doc = Document.of(pomXml);
Element root = doc.root();
// Test updating existing element
Element updatedName = DomUtils.updateOrCreateChildElement(root, "name", "New Name");
assertNotNull(updatedName, "Should return updated element");
String xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<name>New Name</name>"), "Should contain updated name");
assertFalse(xmlOutput.contains("<name>Old Name</name>"), "Should not contain old name");
// Test creating new element
Element description = DomUtils.updateOrCreateChildElement(root, "description", "Test Description");
assertNotNull(description, "Should return created element");
xmlOutput = DomUtils.toXml(doc);
assertTrue(xmlOutput.contains("<description>Test Description</description>"), "Should contain new description");
// Verify element ordering is maintained
int nameIndex = xmlOutput.indexOf("<name>");
int descriptionIndex = xmlOutput.indexOf("<description>");
assertTrue(nameIndex < descriptionIndex, "name should come before description");
}
}
| DomUtilsTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformDestIndexSettingsTests.java | {
"start": 889,
"end": 2959
} | class ____ extends AbstractSerializingTransformTestCase<TransformDestIndexSettings> {
public static TransformDestIndexSettings randomDestIndexSettings() {
int size = randomIntBetween(0, 10);
Map<String, Object> mappings = null;
if (randomBoolean()) {
mappings = Maps.newMapWithExpectedSize(size);
mappings.put("_meta", singletonMap("_transform", singletonMap("version", TransformConfigVersion.CURRENT.toString())));
for (int i = 0; i < size; i++) {
mappings.put(randomAlphaOfLength(10), singletonMap("type", randomAlphaOfLength(10)));
}
}
Settings settings = null;
if (randomBoolean()) {
Settings.Builder settingsBuilder = Settings.builder();
size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
settingsBuilder.put(randomAlphaOfLength(10), randomBoolean());
}
settings = settingsBuilder.build();
}
Set<Alias> aliases = null;
if (randomBoolean()) {
aliases = new HashSet<>();
size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
aliases.add(new Alias(randomAlphaOfLength(10)));
}
}
return new TransformDestIndexSettings(mappings, settings, aliases);
}
@Override
protected TransformDestIndexSettings doParseInstance(XContentParser parser) throws IOException {
return TransformDestIndexSettings.fromXContent(parser);
}
@Override
protected Reader<TransformDestIndexSettings> instanceReader() {
return TransformDestIndexSettings::new;
}
@Override
protected TransformDestIndexSettings createTestInstance() {
return randomDestIndexSettings();
}
@Override
protected TransformDestIndexSettings mutateInstance(TransformDestIndexSettings instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| TransformDestIndexSettingsTests |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/initializers/annotation/OrderedInitializersAnnotationConfigTests.java | {
"start": 4222,
"end": 4487
} | class ____ implements ApplicationContextInitializer<GenericApplicationContext> {
@Override
public void initialize(GenericApplicationContext applicationContext) {
applicationContext.getEnvironment().setActiveProfiles(PROFILE_TWO);
}
}
}
| OrderedTwoInitializer |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/result/view/AbstractView.java | {
"start": 1848,
"end": 12059
} | class ____ implements View, BeanNameAware, ApplicationContextAware {
/** Well-known name for the RequestDataValueProcessor in the bean factory. */
public static final String REQUEST_DATA_VALUE_PROCESSOR_BEAN_NAME = "requestDataValueProcessor";
/** Logger that is available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
private final ReactiveAdapterRegistry adapterRegistry;
private final List<MediaType> mediaTypes = new ArrayList<>(4);
private Charset defaultCharset = StandardCharsets.UTF_8;
private @Nullable String requestContextAttribute;
private @Nullable String beanName;
private @Nullable ApplicationContext applicationContext;
public AbstractView() {
this(ReactiveAdapterRegistry.getSharedInstance());
}
public AbstractView(ReactiveAdapterRegistry reactiveAdapterRegistry) {
this.adapterRegistry = reactiveAdapterRegistry;
this.mediaTypes.add(ViewResolverSupport.DEFAULT_CONTENT_TYPE);
}
/**
* Set the supported media types for this view.
* <p>Default is {@code "text/html;charset=UTF-8"}.
*/
public void setSupportedMediaTypes(List<MediaType> supportedMediaTypes) {
Assert.notEmpty(supportedMediaTypes, "MediaType List must not be empty");
this.mediaTypes.clear();
this.mediaTypes.addAll(supportedMediaTypes);
}
/**
* Get the configured media types supported by this view.
*/
@Override
public List<MediaType> getSupportedMediaTypes() {
return this.mediaTypes;
}
/**
* Set the default charset for this view, used when the
* {@linkplain #setSupportedMediaTypes(List) content type} does not contain one.
* <p>Default is {@linkplain StandardCharsets#UTF_8 UTF 8}.
*/
public void setDefaultCharset(Charset defaultCharset) {
Assert.notNull(defaultCharset, "'defaultCharset' must not be null");
this.defaultCharset = defaultCharset;
}
/**
* Get the default charset, used when the
* {@linkplain #setSupportedMediaTypes(List) content type} does not contain one.
*/
public Charset getDefaultCharset() {
return this.defaultCharset;
}
/**
* Set the name of the {@code RequestContext} attribute for this view.
* <p>Default is none ({@code null}).
*/
public void setRequestContextAttribute(@Nullable String requestContextAttribute) {
this.requestContextAttribute = requestContextAttribute;
}
/**
* Get the name of the {@code RequestContext} attribute for this view, if any.
*/
public @Nullable String getRequestContextAttribute() {
return this.requestContextAttribute;
}
/**
* Set the view's name. Helpful for traceability.
* <p>Framework code must call this when constructing views.
*/
@Override
public void setBeanName(@Nullable String beanName) {
this.beanName = beanName;
}
/**
* Get the view's name.
* <p>Should never be {@code null} if the view was correctly configured.
*/
public @Nullable String getBeanName() {
return this.beanName;
}
@Override
public void setApplicationContext(@Nullable ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
public @Nullable ApplicationContext getApplicationContext() {
return this.applicationContext;
}
/**
* Obtain the {@link ApplicationContext} for actual use.
* @return the {@code ApplicationContext} (never {@code null})
* @throws IllegalStateException if the ApplicationContext cannot be obtained
* @see #getApplicationContext()
*/
protected final ApplicationContext obtainApplicationContext() {
ApplicationContext applicationContext = getApplicationContext();
Assert.state(applicationContext != null, "No ApplicationContext");
return applicationContext;
}
/**
* Prepare the model to render.
* @param model a map with attribute names as keys and corresponding model
* objects as values (the map can also be {@code null} in case of an empty model)
* @param contentType the content type selected to render with, which should
* match one of the {@link #getSupportedMediaTypes() supported media types}
* @param exchange the current exchange
* @return a {@code Mono} that represents when and if rendering succeeds
*/
@Override
public Mono<Void> render(@Nullable Map<String, ?> model, @Nullable MediaType contentType,
ServerWebExchange exchange) {
if (logger.isDebugEnabled()) {
logger.debug(exchange.getLogPrefix() + "View " + formatViewName() +
", model " + (model != null ? model : Collections.emptyMap()));
}
if (contentType != null) {
exchange.getResponse().getHeaders().setContentType(contentType);
}
return getModelAttributes(model, exchange).flatMap(mergedModel -> {
// Expose RequestContext?
if (this.requestContextAttribute != null) {
mergedModel.put(this.requestContextAttribute, createRequestContext(exchange, mergedModel));
}
return renderInternal(mergedModel, contentType, exchange);
});
}
/**
* Prepare the model to use for rendering.
* <p>The default implementation creates a combined output Map that includes
* model as well as static attributes with the former taking precedence.
*/
protected Mono<Map<String, Object>> getModelAttributes(
@Nullable Map<String, ?> model, ServerWebExchange exchange) {
Map<String, Object> attributes;
if (model != null) {
attributes = new ConcurrentHashMap<>(model.size());
for (Map.Entry<String, ?> entry : model.entrySet()) {
if (entry.getValue() != null) {
attributes.put(entry.getKey(), entry.getValue());
}
}
}
else {
attributes = new ConcurrentHashMap<>(0);
}
return resolveAsyncAttributes(attributes, exchange)
.doOnTerminate(() -> exchange.getAttributes().remove(BINDING_CONTEXT_ATTRIBUTE))
.thenReturn(attributes);
}
/**
* Use the configured {@link ReactiveAdapterRegistry} to adapt asynchronous
* attributes to {@code Mono<T>} or {@code Mono<List<T>>} and then wait to
* resolve them into actual values. When the returned {@code Mono<Void>}
* completes, the asynchronous attributes in the model will have been
* replaced with their corresponding resolved values.
* @return result a {@code Mono} that completes when the model is ready
* @since 5.1.8
*/
protected Mono<Void> resolveAsyncAttributes(Map<String, Object> model, ServerWebExchange exchange) {
List<Mono<?>> asyncAttributes = null;
for (Map.Entry<String, ?> entry : model.entrySet()) {
Object value = entry.getValue();
if (value == null) {
continue;
}
ReactiveAdapter adapter = this.adapterRegistry.getAdapter(null, value);
if (adapter != null) {
if (asyncAttributes == null) {
asyncAttributes = new ArrayList<>();
}
String name = entry.getKey();
if (adapter.isMultiValue()) {
asyncAttributes.add(
Flux.from(adapter.toPublisher(value))
.collectList()
.doOnSuccess(result -> model.put(name, result)));
}
else {
asyncAttributes.add(
Mono.from(adapter.toPublisher(value))
.doOnSuccess(result -> {
if (result != null) {
model.put(name, result);
addBindingResult(name, result, model, exchange);
}
else {
model.remove(name);
}
}));
}
}
}
return asyncAttributes != null ? Mono.when(asyncAttributes) : Mono.empty();
}
private void addBindingResult(String name, Object value, Map<String, Object> model, ServerWebExchange exchange) {
BindingContext context = exchange.getAttribute(BINDING_CONTEXT_ATTRIBUTE);
if (context == null || value.getClass().isArray() || value instanceof Collection ||
value instanceof Map || BeanUtils.isSimpleValueType(value.getClass())) {
return;
}
BindingResult result = context.createDataBinder(exchange, value, name).getBindingResult();
model.put(BindingResult.MODEL_KEY_PREFIX + name, result);
}
/**
* Create a {@link RequestContext} to expose under the
* {@linkplain #setRequestContextAttribute specified attribute name}.
* <p>The default implementation creates a standard {@code RequestContext}
* instance for the given exchange and model.
* <p>Can be overridden in subclasses to create custom instances.
* @param exchange the current exchange
* @param model a combined output Map (never {@code null}), with dynamic values
* taking precedence over static attributes
* @return the {@code RequestContext} instance
* @see #setRequestContextAttribute
*/
protected RequestContext createRequestContext(ServerWebExchange exchange, Map<String, Object> model) {
return new RequestContext(exchange, model, obtainApplicationContext(), getRequestDataValueProcessor());
}
/**
* Get the {@link RequestDataValueProcessor} to use.
* <p>The default implementation looks in the {@link #getApplicationContext()
* ApplicationContext} for a {@code RequestDataValueProcessor} bean with
* the name {@link #REQUEST_DATA_VALUE_PROCESSOR_BEAN_NAME}.
* @return the {@code RequestDataValueProcessor}, or {@code null} if there is
* none in the application context
*/
protected @Nullable RequestDataValueProcessor getRequestDataValueProcessor() {
ApplicationContext context = getApplicationContext();
if (context != null && context.containsBean(REQUEST_DATA_VALUE_PROCESSOR_BEAN_NAME)) {
return context.getBean(REQUEST_DATA_VALUE_PROCESSOR_BEAN_NAME, RequestDataValueProcessor.class);
}
return null;
}
/**
* Subclasses must implement this method to actually render the view.
* @param renderAttributes combined output Map (never {@code null}),
* with dynamic values taking precedence over static attributes
* @param contentType the content type selected to render with, which should
* match one of the {@linkplain #getSupportedMediaTypes() supported media types}
* @param exchange current exchange
* @return a {@code Mono} that represents when and if rendering succeeds
*/
protected abstract Mono<Void> renderInternal(Map<String, Object> renderAttributes,
@Nullable MediaType contentType, ServerWebExchange exchange);
@Override
public String toString() {
return getClass().getName() + ": " + formatViewName();
}
protected String formatViewName() {
return (getBeanName() != null ?
"name '" + getBeanName() + "'" : "[" + getClass().getSimpleName() + "]");
}
}
| AbstractView |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawEncoder.java | {
"start": 1186,
"end": 2359
} | class ____ extends AbstractNativeRawEncoder {
static {
ErasureCodeNative.checkNativeCodeLoaded();
}
public NativeXORRawEncoder(ErasureCoderOptions coderOptions) {
super(coderOptions);
encoderLock.writeLock().lock();
try {
initImpl(coderOptions.getNumDataUnits(),
coderOptions.getNumParityUnits());
} finally {
encoderLock.writeLock().unlock();
}
}
@Override
protected void performEncodeImpl(
ByteBuffer[] inputs, int[] inputOffsets, int dataLen,
ByteBuffer[] outputs, int[] outputOffsets) throws IOException {
encodeImpl(inputs, inputOffsets, dataLen, outputs, outputOffsets);
}
@Override
public void release() {
encoderLock.writeLock().lock();
try {
destroyImpl();
} finally {
encoderLock.writeLock().unlock();
}
}
private native void initImpl(int numDataUnits, int numParityUnits);
private native void encodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
int dataLen, ByteBuffer[] outputs,
int[] outputOffsets) throws IOException;
private native void destroyImpl();
}
| NativeXORRawEncoder |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/DiscoverySelectors.java | {
"start": 46587,
"end": 47073
} | class ____ not be null");
Preconditions.notBlank(methodName, "Method name must not be null or blank");
Preconditions.notNull(parameterTypeNames, "Parameter types must not be null");
return new NestedMethodSelector(enclosingClasses, nestedClass, methodName, parameterTypeNames.strip());
}
/**
* Create a {@code NestedMethodSelector} for the supplied enclosing classes,
* nested class, method name, and parameter types.
*
* @param enclosingClasses the path to the nested | must |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/ValueInstantiators.java | {
"start": 2450,
"end": 2659
} | class ____ custom implementations.
* Safer to extend (instead of implementing {@link ValueInstantiators}) in case later
* Jackson versions add new methods in base interface.
*/
public static | for |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/PQCEndpointBuilderFactory.java | {
"start": 14178,
"end": 16112
} | interface ____ {
/**
* PQC Algorithms (camel-pqc)
* Post Quantum Cryptography Signature and Verification component.
*
* Category: security
* Since: 4.12
* Maven coordinates: org.apache.camel:camel-pqc
*
* @return the dsl builder for the headers' name.
*/
default PQCHeaderNameBuilder pqc() {
return PQCHeaderNameBuilder.INSTANCE;
}
/**
* PQC Algorithms (camel-pqc)
* Post Quantum Cryptography Signature and Verification component.
*
* Category: security
* Since: 4.12
* Maven coordinates: org.apache.camel:camel-pqc
*
* Syntax: <code>pqc:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param path label
* @return the dsl builder
*/
default PQCEndpointBuilder pqc(String path) {
return PQCEndpointBuilderFactory.endpointBuilder("pqc", path);
}
/**
* PQC Algorithms (camel-pqc)
* Post Quantum Cryptography Signature and Verification component.
*
* Category: security
* Since: 4.12
* Maven coordinates: org.apache.camel:camel-pqc
*
* Syntax: <code>pqc:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path label
* @return the dsl builder
*/
default PQCEndpointBuilder pqc(String componentName, String path) {
return PQCEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the PQC Algorithms component.
*/
public static | PQCBuilders |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/image/ClientQuotaDelta.java | {
"start": 1025,
"end": 2763
} | class ____ {
private final ClientQuotaImage image;
private final Map<String, OptionalDouble> changes = new HashMap<>();
public ClientQuotaDelta(ClientQuotaImage image) {
this.image = image;
}
public Map<String, OptionalDouble> changes() {
return changes;
}
public void finishSnapshot() {
for (String key : image.quotas().keySet()) {
if (!changes.containsKey(key)) {
// If a quota from the image did not appear in the snapshot, mark it as removed.
changes.put(key, OptionalDouble.empty());
}
}
}
public void replay(ClientQuotaRecord record) {
if (record.remove()) {
changes.put(record.key(), OptionalDouble.empty());
} else {
changes.put(record.key(), OptionalDouble.of(record.value()));
}
}
public ClientQuotaImage apply() {
Map<String, Double> newQuotas = new HashMap<>(image.quotas().size());
for (Entry<String, Double> entry : image.quotas().entrySet()) {
OptionalDouble change = changes.get(entry.getKey());
if (change == null) {
newQuotas.put(entry.getKey(), entry.getValue());
} else if (change.isPresent()) {
newQuotas.put(entry.getKey(), change.getAsDouble());
}
}
for (Entry<String, OptionalDouble> entry : changes.entrySet()) {
if (!newQuotas.containsKey(entry.getKey())) {
if (entry.getValue().isPresent()) {
newQuotas.put(entry.getKey(), entry.getValue().getAsDouble());
}
}
}
return new ClientQuotaImage(newQuotas);
}
}
| ClientQuotaDelta |
java | google__guice | core/test/com/google/inject/spi/ProviderMethodsTest.java | {
"start": 5785,
"end": 6490
} | interface ____ {
Foo getFoo();
int getI();
}
@Test
public void testMultipleBindingAnnotations() {
try {
Guice.createInjector(
new AbstractModule() {
@Provides
@Named("A")
@Blue
public String provideString() {
return "a";
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"more than one annotation annotated with @BindingAnnotation:",
"Named",
"Blue",
"at ProviderMethodsTest$5.provideString(ProviderMethodsTest.java:");
}
}
@Retention(RUNTIME)
@BindingAnnotation
@ | Bar |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/core/AbstractClassGenerator.java | {
"start": 2303,
"end": 2575
} | class ____ {
private final Set<String> reservedClassNames = new HashSet<>();
/**
* {@link AbstractClassGenerator} here holds "cache key" (for example, {@link org.springframework.cglib.proxy.Enhancer}
* configuration), and the value is the generated | ClassLoaderData |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configuration/SecurityReactorContextConfigurationTests.java | {
"start": 3715,
"end": 14531
} | class ____ {
private MockHttpServletRequest servletRequest;
private MockHttpServletResponse servletResponse;
private Authentication authentication;
private SecurityReactorContextConfiguration.SecurityReactorContextSubscriberRegistrar subscriberRegistrar = new SecurityReactorContextConfiguration.SecurityReactorContextSubscriberRegistrar();
public final SpringTestContext spring = new SpringTestContext(this);
@BeforeEach
public void setup() {
this.servletRequest = new MockHttpServletRequest();
this.servletResponse = new MockHttpServletResponse();
this.authentication = new TestingAuthenticationToken("principal", "password");
}
@AfterEach
public void cleanup() {
SecurityContextHolder.clearContext();
RequestContextHolder.resetRequestAttributes();
}
@Test
public void createSubscriberIfNecessaryWhenSubscriberContextContainsSecurityContextAttributesThenReturnOriginalSubscriber() {
Context context = Context.of(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES, new HashMap<>());
BaseSubscriber<Object> originalSubscriber = new BaseSubscriber<Object>() {
@Override
public Context currentContext() {
return context;
}
};
CoreSubscriber<Object> resultSubscriber = this.subscriberRegistrar
.createSubscriberIfNecessary(originalSubscriber);
assertThat(resultSubscriber).isSameAs(originalSubscriber);
}
@Test
public void createSubscriberIfNecessaryWhenWebSecurityContextAvailableThenCreateWithParentContext() {
RequestContextHolder
.setRequestAttributes(new ServletRequestAttributes(this.servletRequest, this.servletResponse));
SecurityContextHolder.getContext().setAuthentication(this.authentication);
String testKey = "test_key";
String testValue = "test_value";
BaseSubscriber<Object> parent = new BaseSubscriber<Object>() {
@Override
public Context currentContext() {
return Context.of(testKey, testValue);
}
};
CoreSubscriber<Object> subscriber = this.subscriberRegistrar.createSubscriberIfNecessary(parent);
Context resultContext = subscriber.currentContext();
assertThat(resultContext.getOrEmpty(testKey)).hasValue(testValue);
Map<Object, Object> securityContextAttributes = resultContext
.getOrDefault(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES, null);
assertThat(securityContextAttributes).hasSize(3);
assertThat(securityContextAttributes).contains(entry(HttpServletRequest.class, this.servletRequest),
entry(HttpServletResponse.class, this.servletResponse),
entry(Authentication.class, this.authentication));
}
@Test
public void createSubscriberIfNecessaryWhenParentContextContainsSecurityContextAttributesThenUseParentContext() {
RequestContextHolder
.setRequestAttributes(new ServletRequestAttributes(this.servletRequest, this.servletResponse));
SecurityContextHolder.getContext().setAuthentication(this.authentication);
Context parentContext = Context.of(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES,
new HashMap<>());
BaseSubscriber<Object> parent = new BaseSubscriber<Object>() {
@Override
public Context currentContext() {
return parentContext;
}
};
CoreSubscriber<Object> subscriber = this.subscriberRegistrar.createSubscriberIfNecessary(parent);
Context resultContext = subscriber.currentContext();
assertThat(resultContext).isSameAs(parentContext);
}
@Test
public void createSubscriberIfNecessaryWhenNotServletRequestAttributesThenStillCreate() {
RequestContextHolder.setRequestAttributes(new RequestAttributes() {
@Override
public Object getAttribute(String name, int scope) {
return null;
}
@Override
public void setAttribute(String name, Object value, int scope) {
}
@Override
public void removeAttribute(String name, int scope) {
}
@Override
public String[] getAttributeNames(int scope) {
return new String[0];
}
@Override
public void registerDestructionCallback(String name, Runnable callback, int scope) {
}
@Override
public Object resolveReference(String key) {
return null;
}
@Override
public String getSessionId() {
return null;
}
@Override
public Object getSessionMutex() {
return null;
}
});
CoreSubscriber<Object> subscriber = this.subscriberRegistrar
.createSubscriberIfNecessary(Operators.emptySubscriber());
assertThat(subscriber).isInstanceOf(SecurityReactorContextConfiguration.SecurityReactorContextSubscriber.class);
}
@Test
public void createPublisherWhenLastOperatorAddedThenSecurityContextAttributesAvailable() {
	// Importing SecurityConfig pulls in SecurityReactorContextConfiguration through
	// OAuth2ImportSelector, which installs the Reactor last-operator hook under test.
	this.spring.register(SecurityConfig.class).autowire();
	// Populate the thread-locals the registrar reads lazily.
	RequestContextHolder
			.setRequestAttributes(new ServletRequestAttributes(this.servletRequest, this.servletResponse));
	SecurityContextHolder.getContext().setAuthentication(this.authentication);
	ClientResponse okResponse = ClientResponse.create(HttpStatus.OK).build();
	// The map the Reactor context is expected to expose downstream.
	Map<Object, Object> expectedAttributes = new HashMap<>();
	expectedAttributes.put(HttpServletRequest.class, this.servletRequest);
	expectedAttributes.put(HttpServletResponse.class, this.servletResponse);
	expectedAttributes.put(Authentication.class, this.authentication);
	// Filter that answers OK only when all three attributes are visible in the context.
	// @formatter:off
	ExchangeFilterFunction attributeProbe = (req, next) -> Mono.deferContextual(Mono::just)
			.filter((ctx) -> ctx.hasKey(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES))
			.map((ctx) -> ctx.get(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES))
			.cast(Map.class)
			.map((attributes) -> {
				boolean allPresent = attributes.containsKey(HttpServletRequest.class)
						&& attributes.containsKey(HttpServletResponse.class)
						&& attributes.containsKey(Authentication.class);
				return allPresent ? okResponse : ClientResponse.create(HttpStatus.NOT_FOUND).build();
			});
	// @formatter:on
	ClientRequest request = ClientRequest.create(HttpMethod.GET, URI.create("https://example.com")).build();
	MockExchangeFunction exchangeFunction = new MockExchangeFunction();
	// Run the filter twice in sequence to prove the attributes survive operator chaining.
	Mono<ClientResponse> result = attributeProbe.filter(request, exchangeFunction)
			.flatMap((response) -> attributeProbe.filter(request, exchangeFunction));
	// @formatter:off
	StepVerifier.create(result)
			.expectAccessibleContext()
			.contains(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES, expectedAttributes)
			.then()
			.expectNext(okResponse)
			.verifyComplete();
	// @formatter:on
}
@Test
public void createPublisherWhenCustomSecurityContextHolderStrategyThenUses() {
	// Register a config that swaps in a custom (spied) SecurityContextHolderStrategy.
	this.spring.register(SecurityConfig.class, SecurityContextChangedListenerConfig.class).autowire();
	SecurityContextHolderStrategy strategy = this.spring.getContext().getBean(SecurityContextHolderStrategy.class);
	strategy.getContext().setAuthentication(this.authentication);
	ClientResponse okResponse = ClientResponse.create(HttpStatus.OK).build();
	// No request attributes were set, so only the Authentication should be populated.
	Map<Object, Object> expectedAttributes = new HashMap<>();
	expectedAttributes.put(HttpServletRequest.class, null);
	expectedAttributes.put(HttpServletResponse.class, null);
	expectedAttributes.put(Authentication.class, this.authentication);
	// Filter that merely forces the lazy attribute map to be resolved.
	// @formatter:off
	ExchangeFilterFunction attributeProbe = (req, next) -> Mono.deferContextual(Mono::just)
			.filter((ctx) -> ctx.hasKey(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES))
			.map((ctx) -> ctx.get(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES))
			.cast(Map.class)
			.map((attributes) -> okResponse);
	// @formatter:on
	ClientRequest request = ClientRequest.create(HttpMethod.GET, URI.create("https://example.com")).build();
	MockExchangeFunction exchangeFunction = new MockExchangeFunction();
	Mono<ClientResponse> result = attributeProbe.filter(request, exchangeFunction)
			.flatMap((response) -> attributeProbe.filter(request, exchangeFunction));
	// @formatter:off
	StepVerifier.create(result)
			.expectAccessibleContext()
			.contains(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES, expectedAttributes)
			.then()
			.expectNext(okResponse)
			.verifyComplete();
	// @formatter:on
	// One filter execution per subscription; two subscriptions hit the custom strategy twice.
	verify(strategy, times(2)).getContext();
}
@Test
public void createPublisherWhenThreadFactoryIsPlatformThenSecurityContextAttributesAvailable() throws Exception {
	// Context attributes must propagate to work submitted on ordinary platform threads.
	this.spring.register(SecurityConfig.class).autowire();
	assertContextAttributesAvailable(Executors.defaultThreadFactory());
}
@Test
@DisabledOnJre(JRE.JAVA_17)
public void createPublisherWhenThreadFactoryIsVirtualThenSecurityContextAttributesAvailable() throws Exception {
	// Same propagation check, but on virtual threads; skipped on Java 17
	// (presumably because virtual threads are unavailable there — see @DisabledOnJre).
	this.spring.register(SecurityConfig.class).autowire();
	assertContextAttributesAvailable(new VirtualThreadTaskExecutor().getVirtualThreadFactory());
}
/**
 * Submits {@link #propagateRequestAttributes()} on a thread built by the given
 * factory and asserts the Reactor context exposed the request, response and
 * authentication set up by this test.
 */
private void assertContextAttributesAvailable(ThreadFactory threadFactory) throws Exception {
	Map<Object, Object> expected = new HashMap<>();
	expected.put(HttpServletRequest.class, this.servletRequest);
	expected.put(HttpServletResponse.class, this.servletResponse);
	expected.put(Authentication.class, this.authentication);
	// try-with-resources ensures the executor is shut down even on assertion failure
	try (SimpleAsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(threadFactory)) {
		Future<Map<Object, Object>> observed = executor.submit(this::propagateRequestAttributes);
		assertThat(observed.get()).isEqualTo(expected);
	}
}
/**
 * Runs on the executor thread: seeds that thread's request/security thread-locals,
 * then subscribes to a Mono so the registered Reactor hook can capture them, and
 * returns the attribute map observed inside the Reactor context.
 */
private Map<Object, Object> propagateRequestAttributes() {
	// Bind request attributes and an authenticated SecurityContext to THIS thread;
	// the ordering matters — both must be in place before the Mono is subscribed.
	RequestAttributes requestAttributes = new ServletRequestAttributes(this.servletRequest, this.servletResponse);
	RequestContextHolder.setRequestAttributes(requestAttributes);
	SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
	securityContext.setAuthentication(this.authentication);
	SecurityContextHolder.setContext(securityContext);
	// block() subscribes here, triggering the last-operator hook that snapshots
	// the thread-locals into the Reactor context under SECURITY_CONTEXT_ATTRIBUTES.
	// @formatter:off
	return Mono.deferContextual(Mono::just)
			.filter((ctx) -> ctx.hasKey(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES))
			.map((ctx) -> ctx.<Map<Object, Object>>get(SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES))
			.map((attributes) -> {
				Map<Object, Object> map = new HashMap<>();
				// Copy over items from lazily loaded map
				Arrays.asList(HttpServletRequest.class, HttpServletResponse.class, Authentication.class)
						.forEach((key) -> map.put(key, attributes.get(key)));
				return map;
			})
			.block();
	// @formatter:on
}
@Configuration
@EnableWebSecurity
static | SecurityReactorContextConfigurationTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java | {
"start": 2482,
"end": 3378
} | class ____ {
public static final long RETAIN_ALL = -1;
public static final ActionType<ActionResponse.Empty> ADD = new ActionType<>("indices:admin/seq_no/add_retention_lease");
public static final ActionType<ActionResponse.Empty> RENEW = new ActionType<>("indices:admin/seq_no/renew_retention_lease");
public static final ActionType<ActionResponse.Empty> REMOVE = new ActionType<>("indices:admin/seq_no/remove_retention_lease");
public static final RemoteClusterActionType<ActionResponse.Empty> REMOTE_ADD = RemoteClusterActionType.emptyResponse(ADD.name());
public static final RemoteClusterActionType<ActionResponse.Empty> REMOTE_RENEW = RemoteClusterActionType.emptyResponse(RENEW.name());
public static final RemoteClusterActionType<ActionResponse.Empty> REMOTE_REMOVE = RemoteClusterActionType.emptyResponse(REMOVE.name());
abstract static | RetentionLeaseActions |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/BrowseEndpointBuilderFactory.java | {
"start": 19702,
"end": 20019
} | class ____ extends AbstractEndpointBuilder implements BrowseEndpointBuilder, AdvancedBrowseEndpointBuilder {
public BrowseEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new BrowseEndpointBuilderImpl(path);
}
} | BrowseEndpointBuilderImpl |
java | quarkusio__quarkus | integration-tests/reactive-messaging-pulsar/src/test/java/io/quarkus/it/pulsar/HelloWorldTestIT.java | {
"start": 116,
"end": 378
} | class ____ extends HelloWorldTest {
public int getPort() {
// In native mode, the port is not injected, so we need to retrieve it from the config
// I don't know how to set this up without hardcoding
return 9000;
}
}
| HelloWorldTestIT |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.