language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/hierarchies/standard/ClassHierarchyWithMergedConfigLevelOneTests.java | {
"start": 1862,
"end": 1961
} | class ____ {
@Bean
String parent() {
return "parent";
}
}
@Configuration
static | AppConfig |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/log/LogComponentCustomizeTwoTest.java | {
"start": 1057,
"end": 2010
} | class ____ extends ContextTestSupport {
private final LogCustomFormatterTest.TestExchangeFormatter formatter = new LogCustomFormatterTest.TestExchangeFormatter();
@Test
public void testCustomize() throws Exception {
Assertions.assertEquals(0, formatter.getCounter());
template.sendBody("direct:start", "Hello World");
Assertions.assertEquals(1, formatter.getCounter());
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
// customize the log component using java lambda style
customize("log", LogComponent.class, l -> {
l.setExchangeFormatter(formatter);
});
from("direct:start")
.to("log:foo");
}
};
}
}
| LogComponentCustomizeTwoTest |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldHaveSameContent.java | {
"start": 1022,
"end": 4054
} | class ____ extends AbstractShouldHaveTextContent {
/**
* Creates a new <code>{@link ShouldHaveSameContent}</code>.
* @param actual the actual file in the failed assertion.
* @param expected the expected file in the failed assertion.
* @param diffs the differences between {@code actual} and {@code expected}.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldHaveSameContent(File actual, File expected, List<Delta<String>> diffs) {
return new ShouldHaveSameContent(actual, expected, diffsAsString(diffs));
}
/**
* Creates a new <code>{@link ShouldHaveSameContent}</code>.
* @param actual the actual InputStream in the failed assertion.
* @param expected the expected InputStream in the failed assertion.
* @param diffs the differences between {@code actual} and {@code expected}.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldHaveSameContent(InputStream actual, InputStream expected, List<Delta<String>> diffs) {
return new ShouldHaveSameContent(actual, expected, diffsAsString(diffs));
}
/**
* Creates a new <code>{@link ShouldHaveSameContent}</code>.
* @param actual the actual InputStream in the failed assertion.
* @param expected the expected String in the failed assertion.
* @param diffs the differences between {@code actual} and {@code expected}.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldHaveSameContent(InputStream actual, String expected, List<Delta<String>> diffs) {
return new ShouldHaveSameContent(actual, expected, diffsAsString(diffs));
}
/**
* Creates a new <code>{@link ShouldHaveSameContent}</code>.
* @param actual the actual Path in the failed assertion.
* @param expected the expected Path in the failed assertion.
* @param diffs the differences between {@code actual} and {@code expected}.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldHaveSameContent(Path actual, Path expected, List<Delta<String>> diffs) {
return new ShouldHaveSameContent(actual, expected, diffsAsString(diffs));
}
private ShouldHaveSameContent(File actual, File expected, String diffs) {
super("%nFile:%n %s%nand file:%n %s%ndo not have same content:%n%n", actual, expected);
this.diffs = diffs;
}
private ShouldHaveSameContent(InputStream actual, InputStream expected, String diffs) {
super("%nInputStreams do not have same content:%n%n", actual, expected);
this.diffs = diffs;
}
private ShouldHaveSameContent(InputStream actual, String expected, String diffs) {
super("%nInputStream does not have same content as String:%n%n", actual, expected);
this.diffs = diffs;
}
private ShouldHaveSameContent(Path actual, Path expected, String diffs) {
super("%nPath:%n %s%nand path:%n %s%ndo not have same content:%n%n", actual, expected);
this.diffs = diffs;
}
}
| ShouldHaveSameContent |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/java8stream/erroneous/ErroneousStreamMappingTest.java | {
"start": 1416,
"end": 8512
} | class ____ {
@ProcessorTest
@WithClasses({ ErroneousStreamToNonStreamMapper.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousStreamToNonStreamMapper.class,
kind = Kind.ERROR,
line = 15,
message = "Can't generate mapping method from iterable type from java stdlib to non-iterable type."),
@Diagnostic(type = ErroneousStreamToNonStreamMapper.class,
kind = Kind.ERROR,
line = 17,
message = "Can't generate mapping method from non-iterable type to iterable type from java stdlib.")
}
)
public void shouldFailToGenerateImplementationBetweenStreamAndNonStreamOrIterable() {
}
@ProcessorTest
@WithClasses({ ErroneousStreamToPrimitivePropertyMapper.class, Source.class, Target.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousStreamToPrimitivePropertyMapper.class,
kind = Kind.ERROR,
line = 13,
message = "Can't map property \"Stream<String> strings\" to \"int strings\". " +
"Consider to declare/implement a mapping method: \"int map(Stream<String> value)\".")
}
)
public void shouldFailToGenerateImplementationBetweenCollectionAndPrimitive() {
}
@ProcessorTest
@WithClasses({ EmptyStreamMappingMapper.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = EmptyStreamMappingMapper.class,
kind = Kind.ERROR,
line = 23,
message = "'nullValueMappingStrategy','dateformat', 'qualifiedBy' and 'elementTargetType' are "
+ "undefined in @IterableMapping, define at least one of them."),
@Diagnostic(type = EmptyStreamMappingMapper.class,
kind = Kind.ERROR,
line = 26,
message = "'nullValueMappingStrategy','dateformat', 'qualifiedBy' and 'elementTargetType' are "
+ "undefined in @IterableMapping, define at least one of them."),
@Diagnostic(type = EmptyStreamMappingMapper.class,
kind = Kind.ERROR,
line = 29,
message = "'nullValueMappingStrategy','dateformat', 'qualifiedBy' and 'elementTargetType' are "
+ "undefined in @IterableMapping, define at least one of them.")
}
)
public void shouldFailOnEmptyIterableAnnotationStreamMappings() {
}
@ProcessorTest
@WithClasses({ ErroneousStreamToStreamNoElementMappingFound.class, NoProperties.class, WithProperties.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousStreamToStreamNoElementMappingFound.class,
kind = Kind.ERROR,
line = 24,
message = "No target bean properties found: " +
"can't map Stream element \"WithProperties withProperties\" to \"NoProperties noProperties\". " +
"Consider to declare/implement a mapping method: \"NoProperties map(WithProperties value)\".")
}
)
public void shouldFailOnNoElementMappingFoundForStreamToStream() {
}
@ProcessorTest
@IssueKey("993")
@WithClasses({ ErroneousStreamToStreamNoElementMappingFoundDisabledAuto.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousStreamToStreamNoElementMappingFoundDisabledAuto.class,
kind = Kind.ERROR,
line = 19,
message = "Can't map stream element \"AttributedString\" to \"String \". " +
"Consider to declare/implement a mapping method: \"String map(AttributedString value)\".")
}
)
public void shouldFailOnNoElementMappingFoundForStreamToStreamWithDisabledAuto() {
}
@ProcessorTest
@WithClasses({ ErroneousListToStreamNoElementMappingFound.class, NoProperties.class, WithProperties.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousListToStreamNoElementMappingFound.class,
kind = Kind.ERROR,
line = 25,
message = "No target bean properties found: " +
"can't map Stream element \"WithProperties withProperties\" to \"NoProperties noProperties\". " +
"Consider to declare/implement a mapping method: \"NoProperties map(WithProperties value)\".")
}
)
public void shouldFailOnNoElementMappingFoundForListToStream() {
}
@ProcessorTest
@IssueKey("993")
@WithClasses({ ErroneousListToStreamNoElementMappingFoundDisabledAuto.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousListToStreamNoElementMappingFoundDisabledAuto.class,
kind = Kind.ERROR,
line = 20,
message = "Can't map stream element \"AttributedString\" to \"String \". " +
"Consider to declare/implement a mapping method: \"String map(AttributedString value)\".")
}
)
public void shouldFailOnNoElementMappingFoundForListToStreamWithDisabledAuto() {
}
@ProcessorTest
@WithClasses({ ErroneousStreamToListNoElementMappingFound.class, NoProperties.class, WithProperties.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousStreamToListNoElementMappingFound.class,
kind = Kind.ERROR,
line = 25,
message = "No target bean properties found: " +
"can't map Stream element \"WithProperties withProperties\" to \"NoProperties noProperties\". " +
"Consider to declare/implement a mapping method: \"NoProperties map(WithProperties value)\".")
}
)
public void shouldFailOnNoElementMappingFoundForStreamToList() {
}
@ProcessorTest
@IssueKey("993")
@WithClasses({ ErroneousStreamToListNoElementMappingFoundDisabledAuto.class })
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousStreamToListNoElementMappingFoundDisabledAuto.class,
kind = Kind.ERROR,
line = 20,
message = "Can't map stream element \"AttributedString\" to \"String \". " +
"Consider to declare/implement a mapping method: \"String map(AttributedString value)\".")
}
)
public void shouldFailOnNoElementMappingFoundForStreamToListWithDisabledAuto() {
}
}
| ErroneousStreamMappingTest |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/autoconfigure/endpoint/jmx/JmxEndpointAccessIntegrationTests.java | {
"start": 1923,
"end": 6891
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(JmxAutoConfiguration.class, EndpointAutoConfiguration.class,
JmxEndpointAutoConfiguration.class, HealthContributorAutoConfiguration.class))
.withUserConfiguration(CustomJmxEndpoint.class)
.withPropertyValues("spring.jmx.enabled=true")
.withConfiguration(AutoConfigurations.of(BeansEndpointAutoConfiguration.class));
@Test
void accessIsUnrestrictedByDefault() {
this.contextRunner.withPropertyValues("management.endpoints.jmx.exposure.include=*").run((context) -> {
MBeanServer mBeanServer = context.getBean(MBeanServer.class);
assertThat(hasOperation(mBeanServer, "beans", "beans")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "read")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "write")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "delete")).isTrue();
});
}
@Test
void accessCanBeReadOnlyByDefault() {
this.contextRunner
.withPropertyValues("management.endpoints.jmx.exposure.include=*",
"management.endpoints.access.default=READ_ONLY")
.run((context) -> {
MBeanServer mBeanServer = context.getBean(MBeanServer.class);
assertThat(hasOperation(mBeanServer, "beans", "beans")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "read")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "write")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "delete")).isFalse();
});
}
@Test
void accessCanBeNoneByDefault() {
this.contextRunner
.withPropertyValues("management.endpoints.jmx.exposure.include=*",
"management.endpoints.access.default=NONE")
.run((context) -> {
MBeanServer mBeanServer = context.getBean(MBeanServer.class);
assertThat(hasOperation(mBeanServer, "beans", "beans")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "read")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "write")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "delete")).isFalse();
});
}
@Test
void accessForOneEndpointCanOverrideTheDefaultAccess() {
this.contextRunner
.withPropertyValues("management.endpoints.jmx.exposure.include=*",
"management.endpoints.access.default=NONE", "management.endpoint.customjmx.access=UNRESTRICTED")
.run((context) -> {
MBeanServer mBeanServer = context.getBean(MBeanServer.class);
assertThat(hasOperation(mBeanServer, "beans", "beans")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "read")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "write")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "delete")).isTrue();
});
}
@Test
void accessCanBeCappedAtReadOnly() {
this.contextRunner
.withPropertyValues("management.endpoints.jmx.exposure.include=*",
"management.endpoints.access.default=UNRESTRICTED",
"management.endpoints.access.max-permitted=READ_ONLY")
.run((context) -> {
MBeanServer mBeanServer = context.getBean(MBeanServer.class);
assertThat(hasOperation(mBeanServer, "beans", "beans")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "read")).isTrue();
assertThat(hasOperation(mBeanServer, "customjmx", "write")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "delete")).isFalse();
});
}
@Test
void accessCanBeCappedAtNone() {
this.contextRunner.withPropertyValues("management.endpoints.jmx.exposure.include=*",
"management.endpoints.access.default=UNRESTRICTED", "management.endpoints.access.max-permitted=NONE")
.run((context) -> {
MBeanServer mBeanServer = context.getBean(MBeanServer.class);
assertThat(hasOperation(mBeanServer, "beans", "beans")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "read")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "write")).isFalse();
assertThat(hasOperation(mBeanServer, "customjmx", "delete")).isFalse();
});
}
private ObjectName getDefaultObjectName(String endpointId) {
return getObjectName("org.springframework.boot", endpointId);
}
private ObjectName getObjectName(String domain, String endpointId) {
try {
return new ObjectName(
String.format("%s:type=Endpoint,name=%s", domain, StringUtils.capitalize(endpointId)));
}
catch (MalformedObjectNameException ex) {
throw new IllegalStateException("Invalid object name", ex);
}
}
private boolean hasOperation(MBeanServer mbeanServer, String endpoint, String operationName) {
try {
for (MBeanOperationInfo operation : mbeanServer.getMBeanInfo(getDefaultObjectName(endpoint))
.getOperations()) {
if (operation.getName().equals(operationName)) {
return true;
}
}
}
catch (Exception ex) {
// Continue
}
return false;
}
@JmxEndpoint(id = "customjmx")
static | JmxEndpointAccessIntegrationTests |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/RocksDBWriteBatchWrapper.java | {
"start": 1547,
"end": 6952
} | class ____ implements AutoCloseable {
private static final int MIN_CAPACITY = 100;
private static final int MAX_CAPACITY = 1000;
private static final int PER_RECORD_BYTES = 100;
// default 0 for disable memory size based flush
private static final long DEFAULT_BATCH_SIZE = 0;
private static final int DEFAULT_CANCELLATION_CHECK_INTERVAL = MIN_CAPACITY;
private static final int DEFAULT_CANCELLATION_CHECK_INTERVAL_BYTES =
DEFAULT_CANCELLATION_CHECK_INTERVAL * PER_RECORD_BYTES;
private final RocksDB db;
private final WriteBatch batch;
private final WriteOptions options;
private final int capacity;
@Nonnegative private final long batchSize;
/** List of all objects that we need to close in close(). */
private final List<AutoCloseable> toClose;
private volatile boolean cancelled;
private final int cancellationCheckInterval;
private final long cancellationCheckIntervalBytes;
private long lastCancellationCheckBatchSize = 0L;
public RocksDBWriteBatchWrapper(@Nonnull RocksDB rocksDB, long writeBatchSize) {
this(rocksDB, null, 500, writeBatchSize);
}
public RocksDBWriteBatchWrapper(@Nonnull RocksDB rocksDB, @Nullable WriteOptions options) {
this(rocksDB, options, 500, DEFAULT_BATCH_SIZE);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB, @Nullable WriteOptions options, long batchSize) {
this(rocksDB, options, 500, batchSize);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB,
@Nullable WriteOptions options,
int capacity,
long batchSize) {
this(
rocksDB,
options,
capacity,
batchSize,
DEFAULT_CANCELLATION_CHECK_INTERVAL,
DEFAULT_CANCELLATION_CHECK_INTERVAL_BYTES);
}
public RocksDBWriteBatchWrapper(
@Nonnull RocksDB rocksDB,
@Nullable WriteOptions options,
int capacity,
long batchSize,
int cancellationCheckInterval,
long cancellationCheckIntervalBytes) {
Preconditions.checkArgument(
capacity >= MIN_CAPACITY && capacity <= MAX_CAPACITY,
"capacity should be between " + MIN_CAPACITY + " and " + MAX_CAPACITY);
Preconditions.checkArgument(batchSize >= 0, "Max batch size have to be no negative.");
this.db = rocksDB;
this.capacity = capacity;
this.batchSize = batchSize;
this.toClose = new ArrayList<>(2);
if (this.batchSize > 0) {
this.batch =
new WriteBatch(
(int) Math.min(this.batchSize, this.capacity * PER_RECORD_BYTES));
} else {
this.batch = new WriteBatch(this.capacity * PER_RECORD_BYTES);
}
this.toClose.add(this.batch);
if (options != null) {
this.options = options;
} else {
// Use default write options with disabled WAL
this.options = new WriteOptions().setDisableWAL(true);
// We own this object, so we must ensure that we close it.
this.toClose.add(this.options);
}
this.cancellationCheckInterval = cancellationCheckInterval;
this.cancellationCheckIntervalBytes = cancellationCheckIntervalBytes;
}
public void put(@Nonnull ColumnFamilyHandle handle, @Nonnull byte[] key, @Nonnull byte[] value)
throws RocksDBException {
maybeEnsureNotCancelled();
batch.put(handle, key, value);
flushIfNeeded();
}
private void maybeEnsureNotCancelled() {
if (batch.count() % cancellationCheckInterval == 0
|| batch.getDataSize() - lastCancellationCheckBatchSize
>= cancellationCheckIntervalBytes) {
ensureNotCancelled();
}
}
public void remove(@Nonnull ColumnFamilyHandle handle, @Nonnull byte[] key)
throws RocksDBException {
batch.delete(handle, key);
flushIfNeeded();
}
public void flush() throws RocksDBException {
ensureNotCancelled();
db.write(options, batch);
batch.clear();
lastCancellationCheckBatchSize = 0;
}
@VisibleForTesting
WriteOptions getOptions() {
return options;
}
public void markCancelled() {
this.cancelled = true;
}
public Closeable getCancelCloseable() {
return this::markCancelled;
}
@Override
public void close() throws RocksDBException {
try {
ensureNotCancelled();
if (batch.count() != 0) {
flush();
}
} finally {
IOUtils.closeAllQuietly(toClose);
}
}
private void ensureNotCancelled() {
if (cancelled) {
throw new CancelTaskException();
}
lastCancellationCheckBatchSize = batch.getDataSize();
}
private void flushIfNeeded() throws RocksDBException {
boolean needFlush =
batch.count() == capacity || (batchSize > 0 && getDataSize() >= batchSize);
if (needFlush) {
flush();
}
}
@VisibleForTesting
long getDataSize() {
return batch.getDataSize();
}
}
| RocksDBWriteBatchWrapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/DiscriminatorOptionsNotNullSingleTableTest.java | {
"start": 3397,
"end": 4132
} | class ____ {
@Id
private Long id;
private String owner;
private BigDecimal balance;
private BigDecimal interestRate;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getOwner() {
return owner;
}
public void setOwner(String owner) {
this.owner = owner;
}
public BigDecimal getBalance() {
return balance;
}
public void setBalance(BigDecimal balance) {
this.balance = balance;
}
public BigDecimal getInterestRate() {
return interestRate;
}
public void setInterestRate(BigDecimal interestRate) {
this.interestRate = interestRate;
}
}
@Entity(name = "DebitAccount")
@DiscriminatorValue("Debit")
public static | Account |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/WebMvcAutoConfigurationTests.java | {
"start": 58499,
"end": 58632
} | class ____ extends ConfigurableWebBindingInitializer {
}
@Configuration(proxyBeanMethods = false)
static | CustomWebBindingInitializer |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/QueryClient.java | {
"start": 616,
"end": 733
} | interface ____ to decouple listener consumers from the stateful classes holding client-references and co.
*/
public | used |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/spdy/DefaultSpdySettingsFrame.java | {
"start": 860,
"end": 4445
} | class ____ implements SpdySettingsFrame {
private boolean clear;
private final Map<Integer, Setting> settingsMap = new TreeMap<Integer, Setting>();
@Override
public Set<Integer> ids() {
return settingsMap.keySet();
}
@Override
public boolean isSet(int id) {
return settingsMap.containsKey(id);
}
@Override
public int getValue(int id) {
final Setting setting = settingsMap.get(id);
return setting != null ? setting.getValue() : -1;
}
@Override
public SpdySettingsFrame setValue(int id, int value) {
return setValue(id, value, false, false);
}
@Override
public SpdySettingsFrame setValue(int id, int value, boolean persistValue, boolean persisted) {
if (id < 0 || id > SpdyCodecUtil.SPDY_SETTINGS_MAX_ID) {
throw new IllegalArgumentException("Setting ID is not valid: " + id);
}
final Integer key = Integer.valueOf(id);
final Setting setting = settingsMap.get(key);
if (setting != null) {
setting.setValue(value);
setting.setPersist(persistValue);
setting.setPersisted(persisted);
} else {
settingsMap.put(key, new Setting(value, persistValue, persisted));
}
return this;
}
@Override
public SpdySettingsFrame removeValue(int id) {
settingsMap.remove(id);
return this;
}
@Override
public boolean isPersistValue(int id) {
final Setting setting = settingsMap.get(id);
return setting != null && setting.isPersist();
}
@Override
public SpdySettingsFrame setPersistValue(int id, boolean persistValue) {
final Setting setting = settingsMap.get(id);
if (setting != null) {
setting.setPersist(persistValue);
}
return this;
}
@Override
public boolean isPersisted(int id) {
final Setting setting = settingsMap.get(id);
return setting != null && setting.isPersisted();
}
@Override
public SpdySettingsFrame setPersisted(int id, boolean persisted) {
final Setting setting = settingsMap.get(id);
if (setting != null) {
setting.setPersisted(persisted);
}
return this;
}
@Override
public boolean clearPreviouslyPersistedSettings() {
return clear;
}
@Override
public SpdySettingsFrame setClearPreviouslyPersistedSettings(boolean clear) {
this.clear = clear;
return this;
}
private Set<Map.Entry<Integer, Setting>> getSettings() {
return settingsMap.entrySet();
}
private void appendSettings(StringBuilder buf) {
for (Map.Entry<Integer, Setting> e: getSettings()) {
Setting setting = e.getValue();
buf.append("--> ");
buf.append(e.getKey());
buf.append(':');
buf.append(setting.getValue());
buf.append(" (persist value: ");
buf.append(setting.isPersist());
buf.append("; persisted: ");
buf.append(setting.isPersisted());
buf.append(')');
buf.append(StringUtil.NEWLINE);
}
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder()
.append(StringUtil.simpleClassName(this))
.append(StringUtil.NEWLINE);
appendSettings(buf);
buf.setLength(buf.length() - StringUtil.NEWLINE.length());
return buf.toString();
}
private static final | DefaultSpdySettingsFrame |
java | google__gson | gson/src/test/java/com/google/gson/functional/MoreSpecificTypeSerializationTest.java | {
"start": 5448,
"end": 5836
} | class ____ {
Collection<ParameterizedBase<String>> collection;
Map<String, ParameterizedBase<String>> map;
ClassWithContainersOfParameterizedBaseFields(
Collection<ParameterizedBase<String>> collection,
Map<String, ParameterizedBase<String>> map) {
this.collection = collection;
this.map = map;
}
}
}
| ClassWithContainersOfParameterizedBaseFields |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/injectionstrategy/spring/annotateWith/SpringAnnotateWithMapperTest.java | {
"start": 1009,
"end": 2908
} | class ____ {
@RegisterExtension
final GeneratedSource generatedSource = new GeneratedSource();
@ProcessorTest
public void shouldHaveComponentAnnotatedQualifiedMapper() {
// then
generatedSource.forMapper( CustomerSpringComponentQualifiedMapper.class )
.content()
.contains( "@Component(value = \"AnnotateWithComponent\")" )
.doesNotContain( "@Component" + System.lineSeparator() );
}
@ProcessorTest
public void shouldHaveControllerAnnotatedQualifiedMapper() {
// then
generatedSource.forMapper( CustomerSpringControllerQualifiedMapper.class )
.content()
.contains( "@Controller(value = \"AnnotateWithController\")" )
.doesNotContain( "@Component" );
}
@ProcessorTest
public void shouldHaveServiceAnnotatedQualifiedMapper() {
// then
generatedSource.forMapper( CustomerSpringServiceQualifiedMapper.class )
.content()
.contains( "@Service(value = \"AnnotateWithService\")" )
.doesNotContain( "@Component" );
}
@ProcessorTest
public void shouldHaveRepositoryAnnotatedQualifiedMapper() {
// then
generatedSource.forMapper( CustomerSpringRepositoryQualifiedMapper.class )
.content()
.contains( "@Repository(value = \"AnnotateWithRepository\")" )
.doesNotContain( "@Component" );
}
@ProcessorTest
public void shouldHaveCustomStereotypeAnnotatedQualifiedMapper() {
// then
generatedSource.forMapper( CustomerSpringCustomStereotypeQualifiedMapper.class )
.content()
.contains( "@CustomStereotype(value = \"AnnotateWithCustomStereotype\")" )
.doesNotContain( "@Component" );
}
}
| SpringAnnotateWithMapperTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobmaster/TestingJobMasterServiceProcess.java | {
"start": 3511,
"end": 6333
} | class ____ {
private UUID leaderSessionId = UUID.randomUUID();
private Supplier<CompletableFuture<Void>> closeAsyncSupplier =
FutureUtils::completedVoidFuture;
private Supplier<Boolean> isInitializedAndRunningSupplier = unsupportedOperation();
private Supplier<CompletableFuture<JobMasterGateway>> getJobMasterGatewayFutureSupplier =
() ->
CompletableFuture.completedFuture(
new TestingJobMasterGatewayBuilder().build());
private Supplier<CompletableFuture<JobManagerRunnerResult>> getResultFutureSupplier =
CompletableFuture::new;
private Supplier<CompletableFuture<String>> getLeaderAddressFutureSupplier =
() -> CompletableFuture.completedFuture("leader address");
private static <T> Supplier<T> unsupportedOperation() {
return () -> {
throw new UnsupportedOperationException();
};
}
public Builder setCloseAsyncSupplier(Supplier<CompletableFuture<Void>> closeAsyncSupplier) {
this.closeAsyncSupplier = closeAsyncSupplier;
return this;
}
public Builder setIsInitializedAndRunningSupplier(
Supplier<Boolean> isInitializedAndRunningSupplier) {
this.isInitializedAndRunningSupplier = isInitializedAndRunningSupplier;
return this;
}
public Builder setGetJobMasterGatewayFutureSupplier(
Supplier<CompletableFuture<JobMasterGateway>> getJobMasterGatewayFutureSupplier) {
this.getJobMasterGatewayFutureSupplier = getJobMasterGatewayFutureSupplier;
return this;
}
public Builder setGetResultFutureSupplier(
Supplier<CompletableFuture<JobManagerRunnerResult>> getResultFutureSupplier) {
this.getResultFutureSupplier = getResultFutureSupplier;
return this;
}
public Builder setGetLeaderAddressFutureSupplier(
Supplier<CompletableFuture<String>> getLeaderAddressFutureSupplier) {
this.getLeaderAddressFutureSupplier = getLeaderAddressFutureSupplier;
return this;
}
public Builder setLeaderSessionId(UUID leaderSessionId) {
this.leaderSessionId = leaderSessionId;
return this;
}
public TestingJobMasterServiceProcess build() {
return new TestingJobMasterServiceProcess(
leaderSessionId,
closeAsyncSupplier,
isInitializedAndRunningSupplier,
getJobMasterGatewayFutureSupplier,
getResultFutureSupplier,
getLeaderAddressFutureSupplier);
}
}
}
| Builder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ondeletecascade/OnDeleteManyToOneTest.java | {
"start": 2771,
"end": 2867
} | class ____ {
@Id
long id;
@ManyToOne
@OnDelete(action = CASCADE)
Parent parent;
}
}
| Child |
java | google__dagger | javatests/dagger/hilt/android/testsubpackage/PackagePrivateConstructorTestClasses.java | {
"start": 1294,
"end": 1724
} | class ____ extends LinearLayout {
public BaseView(Context context) {
super(context);
}
public BaseView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public BaseView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
BaseView(Context context, int unused) {
super(context);
}
}
public abstract static | BaseView |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/task/BlockingTask.java | {
"start": 1083,
"end": 3495
} | interface ____ extends Task {
/**
* Run the task
*
* @param camelContext the camel context
* @param predicate the task as a predicate. The result of the predicate is used to check if the task
* has completed or not. The predicate must return true if the execution has
* completed or false otherwise. Failures on the task should be handled on the
* predicate using the payload as wrapper for In/Out if necessary
* @param payload a payload to be passed to the task
* @param <T> The type of the payload passed to the predicate when testing the task
* @throws TaskRunFailureException is thrown to provide information why the last run failed, but the task should
* keep continue
* @return true if the task has completed successfully or false if: 1) the budget is
* exhausted or 2) the task was interrupted.
*/
default <T> boolean run(CamelContext camelContext, Predicate<T> predicate, T payload) throws TaskRunFailureException {
return this.run(camelContext, () -> predicate.test(payload));
}
/**
* Run the task
*
* @param camelContext the camel context
* @param supplier the task as a boolean supplier. The result is used to check if the task has
* completed or not. The supplier must return true if the execution has completed or
* false otherwise.
* @throws TaskRunFailureException is thrown to provide information why the last run failed, but the task should
* keep continue
* @return true if the task has completed successfully or false if: 1) the budget is
* exhausted or 2) the task was interrupted.
*/
boolean run(CamelContext camelContext, BooleanSupplier supplier) throws TaskRunFailureException;
/**
* Whether the task has been submitted for running (the state of the task can be waiting for next run etc).
*
* @return true if the run method has been called.
*/
boolean isRunning();
}
| BlockingTask |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/event/EventListenerMethodProcessor.java | {
"start": 8224,
"end": 8614
} | class ____ is not annotated as a user or test {@link Component}...
* which indicates that there is no {@link EventListener} to be found there.
* @since 5.1
*/
private static boolean isSpringContainerClass(Class<?> clazz) {
return (clazz.getName().startsWith("org.springframework.") &&
!AnnotatedElementUtils.isAnnotated(ClassUtils.getUserClass(clazz), Component.class));
}
}
| that |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/AbstractClientStream.java | {
"start": 7325,
"end": 16498
} | class ____ extends AbstractStream.TransportState {
/** Whether listener.closed() has been called. */
private final StatsTraceContext statsTraceCtx;
private boolean listenerClosed;
private ClientStreamListener listener;
private boolean fullStreamDecompression;
private DecompressorRegistry decompressorRegistry = DecompressorRegistry.getDefaultInstance();
private boolean deframerClosed = false;
private Runnable deframerClosedTask;
/** Whether the client has half-closed the stream. */
private volatile boolean outboundClosed;
/**
* Whether the stream is closed from the transport's perspective. This can differ from {@link
* #listenerClosed} because there may still be messages buffered to deliver to the application.
*/
private boolean statusReported;
/** True if the status reported (set via {@link #transportReportStatus}) is OK. */
private boolean statusReportedIsOk;
protected TransportState(
int maxMessageSize,
StatsTraceContext statsTraceCtx,
TransportTracer transportTracer,
CallOptions options) {
super(maxMessageSize, statsTraceCtx, transportTracer);
this.statsTraceCtx = checkNotNull(statsTraceCtx, "statsTraceCtx");
if (options.getOnReadyThreshold() != null) {
this.setOnReadyThreshold(options.getOnReadyThreshold());
}
}
private void setFullStreamDecompression(boolean fullStreamDecompression) {
this.fullStreamDecompression = fullStreamDecompression;
}
private void setDecompressorRegistry(DecompressorRegistry decompressorRegistry) {
checkState(this.listener == null, "Already called start");
this.decompressorRegistry =
checkNotNull(decompressorRegistry, "decompressorRegistry");
}
@VisibleForTesting
public final void setListener(ClientStreamListener listener) {
checkState(this.listener == null, "Already called setListener");
this.listener = checkNotNull(listener, "listener");
}
@Override
public void deframerClosed(boolean hasPartialMessage) {
checkState(statusReported, "status should have been reported on deframer closed");
deframerClosed = true;
if (statusReportedIsOk && hasPartialMessage) {
transportReportStatus(
Status.INTERNAL.withDescription("Encountered end-of-stream mid-frame"),
true,
new Metadata());
}
if (deframerClosedTask != null) {
deframerClosedTask.run();
deframerClosedTask = null;
}
}
@Override
protected final ClientStreamListener listener() {
return listener;
}
private final void setOutboundClosed() {
outboundClosed = true;
}
protected final boolean isOutboundClosed() {
return outboundClosed;
}
/**
* Called by transport implementations when they receive headers.
*
* @param headers the parsed headers
*/
protected void inboundHeadersReceived(Metadata headers) {
checkState(!statusReported, "Received headers on closed stream");
statsTraceCtx.clientInboundHeaders(headers);
boolean compressedStream = false;
String streamEncoding = headers.get(CONTENT_ENCODING_KEY);
if (fullStreamDecompression && streamEncoding != null) {
if (streamEncoding.equalsIgnoreCase("gzip")) {
setFullStreamDecompressor(new GzipInflatingBuffer());
compressedStream = true;
} else if (!streamEncoding.equalsIgnoreCase("identity")) {
deframeFailed(
Status.INTERNAL
.withDescription(
String.format("Can't find full stream decompressor for %s", streamEncoding))
.asRuntimeException());
return;
}
}
String messageEncoding = headers.get(MESSAGE_ENCODING_KEY);
if (messageEncoding != null) {
Decompressor decompressor = decompressorRegistry.lookupDecompressor(messageEncoding);
if (decompressor == null) {
deframeFailed(
Status.INTERNAL
.withDescription(String.format("Can't find decompressor for %s", messageEncoding))
.asRuntimeException());
return;
} else if (decompressor != Codec.Identity.NONE) {
if (compressedStream) {
deframeFailed(
Status.INTERNAL
.withDescription("Full stream and gRPC message encoding cannot both be set")
.asRuntimeException());
return;
}
setDecompressor(decompressor);
}
}
listener().headersRead(headers);
}
/**
* Processes the contents of a received data frame from the server.
*
* @param frame the received data frame. Its ownership is transferred to this method.
*/
protected void inboundDataReceived(ReadableBuffer frame) {
checkNotNull(frame, "frame");
boolean needToCloseFrame = true;
try {
if (statusReported) {
log.log(Level.INFO, "Received data on closed stream");
return;
}
needToCloseFrame = false;
deframe(frame);
} finally {
if (needToCloseFrame) {
frame.close();
}
}
}
/**
* Processes the trailers and status from the server.
*
* @param trailers the received trailers
* @param status the status extracted from the trailers
*/
protected void inboundTrailersReceived(Metadata trailers, Status status) {
checkNotNull(status, "status");
checkNotNull(trailers, "trailers");
if (statusReported) {
log.log(Level.INFO, "Received trailers on closed stream:\n {1}\n {2}",
new Object[]{status, trailers});
return;
}
statsTraceCtx.clientInboundTrailers(trailers);
transportReportStatus(status, false, trailers);
}
/**
* Report stream closure with status to the application layer if not already reported. This
* method must be called from the transport thread.
*
* @param status the new status to set
* @param stopDelivery if {@code true}, interrupts any further delivery of inbound messages that
* may already be queued up in the deframer. If {@code false}, the listener will be
* notified immediately after all currently completed messages in the deframer have been
* delivered to the application.
* @param trailers new instance of {@code Trailers}, either empty or those returned by the
* server
*/
public final void transportReportStatus(final Status status, boolean stopDelivery,
final Metadata trailers) {
transportReportStatus(status, RpcProgress.PROCESSED, stopDelivery, trailers);
}
/**
* Report stream closure with status to the application layer if not already reported. This
* method must be called from the transport thread.
*
* @param status the new status to set
* @param rpcProgress RPC progress that the
* {@link ClientStreamListener#closed(Status, RpcProgress, Metadata)}
* will receive
* @param stopDelivery if {@code true}, interrupts any further delivery of inbound messages that
* may already be queued up in the deframer and overrides any previously queued status.
* If {@code false}, the listener will be notified immediately after all currently
* completed messages in the deframer have been delivered to the application.
* @param trailers new instance of {@code Trailers}, either empty or those returned by the
* server
*/
public final void transportReportStatus(
final Status status,
final RpcProgress rpcProgress,
boolean stopDelivery,
final Metadata trailers) {
checkNotNull(status, "status");
checkNotNull(trailers, "trailers");
// If stopDelivery, we continue in case previous invocation is waiting for stall
if (statusReported && !stopDelivery) {
return;
}
statusReported = true;
statusReportedIsOk = status.isOk();
onStreamDeallocated();
if (deframerClosed) {
deframerClosedTask = null;
closeListener(status, rpcProgress, trailers);
} else {
deframerClosedTask =
new Runnable() {
@Override
public void run() {
closeListener(status, rpcProgress, trailers);
}
};
closeDeframer(stopDelivery);
}
}
/**
* Closes the listener if not previously closed.
*
* @throws IllegalStateException if the call has not yet been started.
*/
private void closeListener(
Status status, RpcProgress rpcProgress, Metadata trailers) {
if (!listenerClosed) {
listenerClosed = true;
statsTraceCtx.streamClosed(status);
if (getTransportTracer() != null) {
getTransportTracer().reportStreamClosed(status.isOk());
}
listener().closed(status, rpcProgress, trailers);
}
}
}
private | TransportState |
java | google__auto | common/src/test/java/com/google/auto/common/BasicAnnotationProcessorTest.java | {
"start": 18246,
"end": 18822
} | class ____ {",
" <@"
+ TypeParameterRequiresGeneratedCode.class.getCanonicalName()
+ " T extends SomeGeneratedClass> void foo(T t) {}",
"}");
requiresGeneratedCodeDeferralTest(dependentTestFileObject);
}
@Test
public void properlyDefersProcessing_nestedTypeValidBeforeOuterType() {
JavaFileObject source =
JavaFileObjects.forSourceLines(
"test.ValidInRound2",
"package test;",
"",
"@" + AnAnnotation.class.getCanonicalName(),
"public | ClassA |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/EnumAndColumnDefinitionTest.java | {
"start": 3102,
"end": 3212
} | enum ____ {
A,
B;
}
@Entity(name = "TestEntity")
@Table(name = "test_entity")
public static | AnotherMyEnum |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/rest/ParamDefinition.java | {
"start": 7219,
"end": 7590
} | enum ____
*/
public ParamDefinition allowableValues(List<String> allowableValues) {
List<ValueDefinition> list = new ArrayList<>();
for (String av : allowableValues) {
list.add(new ValueDefinition(av));
}
setAllowableValues(list);
return this;
}
/**
* Allowed values of the parameter when its an | type |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java | {
"start": 1018,
"end": 2528
} | class ____ {
public static final ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results");
public static final ParseField SEARCH_RESULT_ITEMS_FIELD = new ParseField("items");
private static final ObjectParser<Map<String, Object>, AnalyticsEvent.Context> PARSER = new ObjectParser<>(
"search_results",
HashMap::new
);
private static final ObjectParser<Map<String, Object>, AnalyticsEvent.Context> ITEM_PARSER = new ObjectParser<>(
"search_results_item",
HashMap::new
);
static {
PARSER.declareObjectArray(
(b, v) -> b.put(SEARCH_RESULT_ITEMS_FIELD.getPreferredName(), v),
(p, c) -> Map.copyOf(ITEM_PARSER.parse(p, c)),
SEARCH_RESULT_ITEMS_FIELD
);
PARSER.declareInt((b, v) -> b.put(SEARCH_RESULTS_TOTAL_FIELD.getPreferredName(), v), SEARCH_RESULTS_TOTAL_FIELD);
ITEM_PARSER.declareObject(
(b, v) -> b.put(DOCUMENT_FIELD.getPreferredName(), v),
DocumentAnalyticsEventField::fromXContent,
DOCUMENT_FIELD
);
ITEM_PARSER.declareObject((b, v) -> b.put(PAGE_FIELD.getPreferredName(), v), PageAnalyticsEventField::fromXContent, PAGE_FIELD);
}
private SearchResultAnalyticsEventField() {}
public static Map<String, Object> fromXContent(XContentParser parser, AnalyticsEvent.Context context) throws IOException {
return Map.copyOf(PARSER.parse(parser, context));
}
}
| SearchResultAnalyticsEventField |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/KeyValueToTimestampedKeyValueIteratorAdapter.java | {
"start": 1118,
"end": 1375
} | class ____ used to ensure backward compatibility at DSL level between
* {@link org.apache.kafka.streams.state.TimestampedKeyValueStore} and
* {@link org.apache.kafka.streams.state.KeyValueStore}.
*
* @see KeyValueToTimestampedKeyValueByteStoreAdapter
*/
| is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/association/OneToManyAssociationTest.java | {
"start": 2254,
"end": 3099
} | class ____ {
@Id
Long id;
String name;
// HHH-13446 - Type not validated in bidirectional association mapping
@OneToMany(cascade = CascadeType.ALL, mappedBy = "custId", fetch = FetchType.EAGER)
List<CustomerInventory> inventoryIdList = new ArrayList<>();
@OneToMany( mappedBy = "customer", cascade = CascadeType.ALL, fetch = FetchType.EAGER )
List<CustomerInventory> customerInventories = new ArrayList<>();
void addInventory(CustomerInventory inventory) {
List<CustomerInventory> list = customerInventories;
list.add( inventory );
customerInventories = list;
}
List<CustomerInventory> getInventories() {
return Collections.unmodifiableList( customerInventories );
}
void removeInventory(CustomerInventory inventory) {
customerInventories.remove( inventory );
}
}
@Entity
private static | Customer |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java | {
"start": 17342,
"end": 27591
} | class ____ {
private Method staticMethod;
private Function<List<DataType>, License.OperationMode> fallbackLambda;
public LicenseRequirementChecker(Class<?> testClass) {
try {
staticMethod = testClass.getMethod("licenseRequirement", List.class);
if (License.OperationMode.class.equals(staticMethod.getReturnType()) == false
|| java.lang.reflect.Modifier.isStatic(staticMethod.getModifiers()) == false) {
staticMethod = null; // Reset if the method doesn't match the signature
}
} catch (NoSuchMethodException e) {
staticMethod = null;
}
if (staticMethod == null) {
fallbackLambda = fieldTypes -> License.OperationMode.BASIC;
}
}
public License.OperationMode invoke(List<DataType> fieldTypes) throws Exception {
if (staticMethod != null) {
return (License.OperationMode) staticMethod.invoke(null, fieldTypes);
} else {
return fallbackLambda.apply(fieldTypes);
}
}
}
protected final String category;
protected final String name;
protected final FunctionDefinition definition;
protected final Supplier<Set<TypeSignature>> signatures;
protected final Callbacks callbacks;
private final LicenseRequirementChecker licenseChecker;
private final KibanaSignaturePatcher kibanaSignaturePatcher;
protected DocsV3Support(
String category,
String name,
Class<?> testClass,
Supplier<Set<TypeSignature>> signatures,
Callbacks callbacks
) {
this(category, name, null, testClass, signatures, callbacks);
}
private DocsV3Support(
String category,
String name,
FunctionDefinition definition,
Class<?> testClass,
Supplier<Set<TypeSignature>> signatures,
Callbacks callbacks
) {
this.category = category;
this.name = name;
this.definition = definition == null ? definition(name) : definition;
this.signatures = signatures;
this.callbacks = callbacks;
this.licenseChecker = new LicenseRequirementChecker(testClass);
this.kibanaSignaturePatcher = new KibanaSignaturePatcher(testClass);
}
String replaceLinks(String text) {
return replaceAsciidocLinks(replaceMacros(text));
}
private String replaceAsciidocLinks(String text) {
Pattern pattern = Pattern.compile("<<([^>]*)>>");
Matcher matcher = pattern.matcher(text);
StringBuilder result = new StringBuilder();
while (matcher.find()) {
String match = matcher.group(1);
matcher.appendReplacement(result, getLink(match));
}
matcher.appendTail(result);
return result.toString();
}
private String replaceMacros(String text) {
Pattern pattern = Pattern.compile("\\{([^}]+)}(/[^\\[]+)\\[([^]]+)]");
Matcher matcher = pattern.matcher(text);
StringBuilder result = new StringBuilder();
while (matcher.find()) {
String macro = matcher.group(1);
String path = matcher.group(2);
String display = matcher.group(3);
matcher.appendReplacement(result, getMacroLink(macro, path, display));
}
matcher.appendTail(result);
return result.toString();
}
private String getMacroLink(String macro, String path, String display) {
if (MACROS.containsKey(macro) == false) {
throw new IllegalArgumentException("Unknown macro [" + macro + "]");
}
return String.format(Locale.ROOT, "[%s](%s%s)", display, MACROS.get(macro), path);
}
/**
* The new link syntax is extremely verbose.
* Rather than make a large number of messy changes to the java files, we simply re-write the existing links to the new syntax.
*/
private String getLink(String key) {
String[] parts = key.split(",\\s*");
// Inject esql examples
if (parts[0].equals("load-esql-example")) {
try {
Map<String, String> example = Arrays.stream(parts[1].trim().split("\\s+"))
.map(s -> s.split("\\s*=\\s*", 2))
.map(p -> entry(p[0], p[1]))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
return "```esql\n" + loadExample(example.get("file"), example.get("tag")) + "\n```\n";
} catch (IOException e) {
return e.getMessage();
}
}
// Known root query-languages markdown files
if (knownFiles.containsKey(parts[0])) {
return makeLink(key, "", "/reference/query-languages/" + knownFiles.get(parts[0]));
}
// Old-style links within ES|QL reference
if (key.startsWith("esql-")) {
String cmd = parts[0].replace("esql-", "");
String parentFile = parentFileFor(cmd);
return makeLink(key, "esql-", parentFile);
}
// Old-style links to Query DSL pages
if (key.startsWith("query-dsl-")) {
// <<query-dsl-match-query,match query>>
// [`match`](/reference/query-languages/query-dsl/query-dsl-match-query.md)
return makeLink(key, "query-dsl-", "/reference/query-languages/query-dsl/query-dsl-match-query.md");
}
// Known links to mapping-reference
if (knownMapping.containsKey(parts[0])) {
return makeLink(key, "", knownMapping.get(parts[0]));
}
// Various other remaining old asciidoc links
// <<match-field-params,match query parameters>>
return switch (parts[0]) {
case "match-field-params" -> makeLink(key, "", "/reference/query-languages/query-dsl/query-dsl-match-query.md");
case "search-aggregations-bucket-histogram-aggregation" -> makeLink(
key,
"",
"/reference/aggregations/search-aggregations-bucket-histogram-aggregation.md"
);
default -> throw new IllegalArgumentException("Invalid link key <<" + key + ">>");
};
}
private String parentFileFor(String cmd) {
if (knownCommands.containsKey(cmd)) {
return "/reference/query-languages/esql/commands/processing-commands.md";
} else if (cmd.startsWith("mv_")) {
return "/reference/query-languages/esql/functions-operators/mv-functions.md";
} else if (cmd.contains("-operator")) {
return "/reference/query-languages/esql/functions-operators/operators.md";
} else if (cmd.startsWith("st_")) {
return "/reference/query-languages/esql/functions-operators/spatial-functions.md";
} else if (cmd.startsWith("to_")) {
return "/reference/query-languages/esql/functions-operators/type-conversion-functions.md";
} else if (cmd.startsWith("date_")) {
return "/reference/query-languages/esql/functions-operators/date-time-functions.md";
} else if (cmd.equals("split")) {
return "/reference/query-languages/esql/functions-operators/string-functions.md";
} else {
return "/reference/query-languages/esql/functions-operators/aggregation-functions.md";
}
}
private String makeLink(String key, String prefix, String parentFile) {
String displayText = key.substring(prefix.length());
if (knownCommands.containsKey(displayText)) {
displayText = "`" + knownCommands.get(displayText).toUpperCase(Locale.ROOT) + "`";
} else if (knownOperators.containsKey(displayText)) {
displayText = "`" + knownOperators.get(displayText) + "`";
} else {
int comma = displayText.indexOf(',');
if (comma > 0) {
key = prefix + displayText.substring(0, comma);
displayText = displayText.substring(comma + 1).trim();
} else if (parentFile.contains("esql/esql-") || parentFile.contains("esql/functions-operators")) {
// For ES|QL commands and functions we normally make uppercase code
displayText = "`" + displayText.toUpperCase(Locale.ROOT) + "`";
}
}
if (parentFile.contains("/" + key + ".md") || parentFile.contains("/" + key.replaceAll("esql-", "") + ".md")) {
// The current docs-builder strips off all link targets that match the filename, so we need to do the same
return String.format(Locale.ROOT, "[%s](%s)", displayText, parentFile);
} else {
return String.format(Locale.ROOT, "[%s](%s#%s)", displayText, parentFile, key);
}
}
void writeToTempImageDir(String str) throws IOException {
assert callbacks.supportsRendering();
// We have to write to a tempdir because it’s all test are allowed to write to. Gradle can move them.
Path dir = PathUtils.get(System.getProperty("java.io.tmpdir")).resolve("esql").resolve("images").resolve(category);
callbacks.write(dir, name, "svg", str, false);
}
void writeToTempSnippetsDir(String subdir, String str) throws IOException {
// We have to write to a tempdir because it’s all test are allowed to write to. Gradle can move them.
Path dir = PathUtils.get(System.getProperty("java.io.tmpdir"))
.resolve("esql")
.resolve("_snippets")
.resolve(category)
.resolve(subdir);
callbacks.write(dir, name, "md", str, false);
}
void writeToTempKibanaDir(String subdir, String extension, String str) throws IOException {
// We have to write to a tempdir because it’s all test are allowed to write to. Gradle can move them.
Path dir = PathUtils.get(System.getProperty("java.io.tmpdir")).resolve("esql").resolve("kibana").resolve(subdir).resolve(category);
callbacks.write(dir, name, extension, str, true);
}
protected abstract void renderSignature() throws IOException;
protected abstract void renderDocs() throws Exception;
static | LicenseRequirementChecker |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/component/extension/verifier/DefaultComponentVerifierExtension.java | {
"start": 1586,
"end": 8690
} | class ____ implements ComponentVerifierExtension, CamelContextAware, ComponentAware {
private final String defaultScheme;
private Component component;
private CamelContext camelContext;
protected DefaultComponentVerifierExtension(String defaultScheme) {
this(defaultScheme, null, null);
}
protected DefaultComponentVerifierExtension(String defaultScheme, CamelContext camelContext) {
this(defaultScheme, camelContext, null);
}
protected DefaultComponentVerifierExtension(String defaultScheme, CamelContext camelContext, Component component) {
this.defaultScheme = defaultScheme;
this.camelContext = camelContext;
this.component = component;
}
// *************************************
//
// *************************************
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public Component getComponent() {
return component;
}
@Override
public void setComponent(Component component) {
this.component = component;
}
@Override
public Result verify(Scope scope, Map<String, Object> parameters) {
// Camel context is mandatory
if (this.camelContext == null) {
return ResultBuilder.withStatusAndScope(Result.Status.ERROR, scope)
.error(ResultErrorBuilder
.withCodeAndDescription(VerificationError.StandardCode.INTERNAL, "Missing camel-context").build())
.build();
}
if (scope == Scope.PARAMETERS) {
return verifyParameters(parameters);
}
if (scope == Scope.CONNECTIVITY) {
return verifyConnectivity(parameters);
}
return ResultBuilder.unsupportedScope(scope).build();
}
protected Result verifyConnectivity(Map<String, Object> parameters) {
return ResultBuilder.withStatusAndScope(Result.Status.UNSUPPORTED, Scope.CONNECTIVITY).build();
}
protected Result verifyParameters(Map<String, Object> parameters) {
ResultBuilder builder = ResultBuilder.withStatusAndScope(Result.Status.OK, Scope.PARAMETERS);
// Validate against catalog
verifyParametersAgainstCatalog(builder, parameters);
return builder.build();
}
// *************************************
// Helpers :: Parameters validation
// *************************************
protected void verifyParametersAgainstCatalog(ResultBuilder builder, Map<String, Object> parameters) {
verifyParametersAgainstCatalog(builder, parameters, new CatalogVerifierCustomizer());
}
protected void verifyParametersAgainstCatalog(
ResultBuilder builder, Map<String, Object> parameters, CatalogVerifierCustomizer customizer) {
String scheme = defaultScheme;
if (parameters.containsKey("scheme")) {
scheme = parameters.get("scheme").toString();
}
// Grab the runtime catalog to check parameters
RuntimeCamelCatalog catalog = PluginHelper.getRuntimeCamelCatalog(camelContext);
// Convert from Map<String, Object> to Map<String, String> as required
// by the Camel Catalog
EndpointValidationResult result = catalog.validateProperties(
scheme,
parameters.entrySet().stream()
.collect(
Collectors.toMap(
Map.Entry::getKey,
e -> camelContext.getTypeConverter().convertTo(String.class, e.getValue()))));
if (!result.isSuccess()) {
if (customizer.isIncludeUnknown()) {
stream(result.getUnknown())
.map(option -> ResultErrorBuilder.withUnknownOption(option).build())
.forEach(builder::error);
}
if (customizer.isIncludeRequired()) {
stream(result.getRequired())
.map(option -> ResultErrorBuilder.withMissingOption(option).build())
.forEach(builder::error);
}
if (customizer.isIncludeInvalidBoolean()) {
stream(result.getInvalidBoolean())
.map(entry -> ResultErrorBuilder.withIllegalOption(entry.getKey(), entry.getValue()).build())
.forEach(builder::error);
}
if (customizer.isIncludeInvalidInteger()) {
stream(result.getInvalidInteger())
.map(entry -> ResultErrorBuilder.withIllegalOption(entry.getKey(), entry.getValue()).build())
.forEach(builder::error);
}
if (customizer.isIncludeInvalidNumber()) {
stream(result.getInvalidNumber())
.map(entry -> ResultErrorBuilder.withIllegalOption(entry.getKey(), entry.getValue()).build())
.forEach(builder::error);
}
if (customizer.isIncludeInvalidEnum()) {
stream(result.getInvalidEnum())
.map(entry -> ResultErrorBuilder.withIllegalOption(entry.getKey(), entry.getValue())
.detail("enum.values", result.getEnumChoices(entry.getKey()))
.build())
.forEach(builder::error);
}
}
}
// *************************************
// Helpers
// *************************************
protected <T> T setProperties(T instance, Map<String, Object> properties) throws Exception {
if (camelContext == null) {
throw new IllegalStateException("Camel context is null");
}
if (!properties.isEmpty()) {
PropertyBindingSupport.build().bind(camelContext, instance, properties);
}
return instance;
}
protected <T> T setProperties(T instance, String prefix, Map<String, Object> properties) throws Exception {
return setProperties(instance, PropertiesHelper.extractProperties(properties, prefix, false));
}
protected <T> Optional<T> getOption(Map<String, Object> parameters, String key, Class<T> type) {
Object value = parameters.get(key);
if (value != null) {
return Optional.ofNullable(CamelContextHelper.convertTo(camelContext, type, value));
}
return Optional.empty();
}
protected <T> T getOption(Map<String, Object> parameters, String key, Class<T> type, Supplier<T> defaultSupplier) {
return getOption(parameters, key, type).orElseGet(defaultSupplier);
}
protected <T> T getMandatoryOption(Map<String, Object> parameters, String key, Class<T> type) throws NoSuchOptionException {
return getOption(parameters, key, type).orElseThrow(() -> new NoSuchOptionException(key));
}
}
| DefaultComponentVerifierExtension |
java | apache__camel | components/camel-printer/src/main/java/org/apache/camel/component/printer/DocFlavorAssigner.java | {
"start": 886,
"end": 10696
} | class ____ {
private DocFlavor d = DocFlavor.BYTE_ARRAY.AUTOSENSE;
public DocFlavor forMimeTypeAUTOSENSE(String flavor) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.AUTOSENSE;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.AUTOSENSE;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.AUTOSENSE;
}
return d;
}
public DocFlavor forMimeTypeGIF(String flavor) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.GIF;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.GIF;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.GIF;
}
return d;
}
public DocFlavor forMimeTypeJPEG(String flavor) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.JPEG;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.JPEG;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.JPEG;
}
return d;
}
public DocFlavor forMimeTypePDF(String flavor) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.PDF;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.PDF;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.PDF;
}
return d;
}
public DocFlavor forMimeTypePCL(String flavor) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.PCL;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.PCL;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.PCL;
}
return d;
}
public DocFlavor forMimeTypePOSTSCRIPT(String flavor) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.POSTSCRIPT;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.POSTSCRIPT;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.POSTSCRIPT;
}
return d;
}
public DocFlavor forMimeTypeHOST(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_HOST")) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_HTML_HOST;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_HTML_HOST;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_HTML_HOST;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_PLAIN_HOST;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_PLAIN_HOST;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_PLAIN_HOST;
}
}
return d;
}
public DocFlavor forMimeTypeUSASCII(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_US_ASCII")) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_HTML_US_ASCII;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_HTML_US_ASCII;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_HTML_US_ASCII;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_PLAIN_US_ASCII;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_PLAIN_US_ASCII;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_PLAIN_US_ASCII;
}
}
return d;
}
public DocFlavor forMimeTypeUTF16(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16")) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_16;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_16;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_HTML_UTF_16;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_PLAIN_UTF_16;
}
}
return d;
}
public DocFlavor forMimeTypeUTF16LE(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16LE")) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_16LE;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_16LE;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_HTML_UTF_16LE;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16LE;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16LE;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_PLAIN_UTF_16LE;
}
}
return d;
}
public DocFlavor forMimeTypeUTF16BE(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16BE")) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_16BE;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_16BE;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_HTML_UTF_16BE;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16BE;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16BE;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_PLAIN_UTF_16BE;
}
}
return d;
}
public DocFlavor forMimeTypeUTF8(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16BE")) {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_8;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_8;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_HTML_UTF_8;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.BYTE_ARRAY")) {
d = DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_8;
} else if (flavor.equalsIgnoreCase("DocFlavor.INPUT_STREAM")) {
d = DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_8;
} else if (flavor.equalsIgnoreCase("DocFlavor.URL")) {
d = DocFlavor.URL.TEXT_PLAIN_UTF_8;
}
}
return d;
}
public DocFlavor forMimeTypeBasic(String flavor, String mimeType) {
if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16BE")) {
if (flavor.equalsIgnoreCase("DocFlavor.CHAR_ARRAY")) {
d = DocFlavor.CHAR_ARRAY.TEXT_HTML;
} else if (flavor.equalsIgnoreCase("DocFlavor.READER")) {
d = DocFlavor.READER.TEXT_HTML;
} else if (flavor.equalsIgnoreCase("DocFlavor.STRING")) {
d = DocFlavor.STRING.TEXT_HTML;
}
} else {
if (flavor.equalsIgnoreCase("DocFlavor.CHAR_ARRAY")) {
d = DocFlavor.CHAR_ARRAY.TEXT_PLAIN;
} else if (flavor.equalsIgnoreCase("DocFlavor.READER")) {
d = DocFlavor.READER.TEXT_PLAIN;
} else if (flavor.equalsIgnoreCase("DocFlavor.STRING")) {
d = DocFlavor.STRING.TEXT_PLAIN;
}
}
return d;
}
public DocFlavor forMimeTypePAGEABLE() {
d = DocFlavor.SERVICE_FORMATTED.PAGEABLE;
return d;
}
public DocFlavor forMimeTypePRINTABLE() {
d = DocFlavor.SERVICE_FORMATTED.PRINTABLE;
return d;
}
public DocFlavor forMimeTypeRENDERABLEIMAGE() {
d = DocFlavor.SERVICE_FORMATTED.RENDERABLE_IMAGE;
return d;
}
}
| DocFlavorAssigner |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sql/spi/NativeSelectQueryDefinition.java | {
"start": 328,
"end": 794
} | interface ____<R> {
String getSqlString();
/**
* @apiNote This returns query parameters in the order they were
* encountered - potentially including "duplicate references" to a single parameter
*/
List<ParameterOccurrence> getQueryParameterOccurrences();
ResultSetMapping getResultSetMapping();
Set<String> getAffectedTableNames();
// todo (6.0) : drop support for executing callables via NativeQuery
boolean isCallable();
}
| NativeSelectQueryDefinition |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/interceptors/GrpcContextPropagationTest.java | {
"start": 664,
"end": 1440
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addPackage(GreeterGrpc.class.getPackage())
.addClasses(MyFirstInterceptor.class, MyInterceptedGreeting.class));
@GrpcClient
Greeter greeter;
@Test
void test() {
HelloReply foo = greeter.sayHello(HelloRequest.newBuilder().setName("foo").build()).await().indefinitely();
assertThat(foo.getMessage()).isEqualTo("hello k1 - 1");
foo = greeter.sayHello(HelloRequest.newBuilder().setName("foo").build()).await().indefinitely();
assertThat(foo.getMessage()).isEqualTo("hello k1 - 2");
}
}
| GrpcContextPropagationTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java | {
"start": 191486,
"end": 192957
} | class ____ implements ConfigurableClusterPrivilege {
@Override
public Category getCategory() {
return Category.APPLICATION;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder;
}
@Override
public String getWriteableName() {
return "mock";
}
@Override
public void writeTo(StreamOutput out) throws IOException {}
}
private static ResolvedIndexExpression resolvedIndexExpression(
String original,
Set<String> localExpressions,
ResolvedIndexExpression.LocalIndexResolutionResult localIndexResolutionResult
) {
return new ResolvedIndexExpression(
original,
new ResolvedIndexExpression.LocalExpressions(localExpressions, localIndexResolutionResult, null),
Set.of()
);
}
private static ResolvedIndexExpression resolvedIndexExpression(
String original,
Set<String> localExpressions,
ResolvedIndexExpression.LocalIndexResolutionResult localIndexResolutionResult,
ElasticsearchException exception
) {
return new ResolvedIndexExpression(
original,
new ResolvedIndexExpression.LocalExpressions(localExpressions, localIndexResolutionResult, exception),
Set.of()
);
}
}
| MockConfigurableClusterPrivilege |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/messages/WebMonitorMessagesTest.java | {
"start": 1763,
"end": 6766
} | class ____ {
@Test
void testStatusMessages() throws Exception {
final Random rnd = new Random();
GenericMessageTester.testMessageInstance(RequestJobsOverview.getInstance());
GenericMessageTester.testMessageInstance(RequestJobsWithIDsOverview.getInstance());
GenericMessageTester.testMessageInstance(RequestStatusOverview.getInstance());
GenericMessageTester.testMessageInstance(RequestJobsOverview.getInstance());
GenericMessageTester.testMessageInstance(
GenericMessageTester.instantiateGeneric(RequestJobDetails.class, rnd));
GenericMessageTester.testMessageInstance(
GenericMessageTester.instantiateGeneric(ClusterOverview.class, rnd));
GenericMessageTester.testMessageInstance(
GenericMessageTester.instantiateGeneric(JobsOverview.class, rnd));
GenericMessageTester.testMessageInstance(
new JobIdsWithStatusOverview(
Arrays.asList(
new JobIdsWithStatusOverview.JobIdWithStatus(
JobID.generate(), JobStatus.RUNNING),
new JobIdsWithStatusOverview.JobIdWithStatus(
JobID.generate(), JobStatus.CANCELED),
new JobIdsWithStatusOverview.JobIdWithStatus(
JobID.generate(), JobStatus.CREATED),
new JobIdsWithStatusOverview.JobIdWithStatus(
JobID.generate(), JobStatus.FAILED),
new JobIdsWithStatusOverview.JobIdWithStatus(
JobID.generate(), JobStatus.RESTARTING))));
}
@Test
void testJobDetailsMessage() throws Exception {
final Random rnd = new Random();
int[] numVerticesPerState = new int[ExecutionState.values().length];
int numTotal = 0;
for (int i = 0; i < numVerticesPerState.length; i++) {
int count = rnd.nextInt(55);
numVerticesPerState[i] = count;
numTotal += count;
}
long time = rnd.nextLong();
long endTime = rnd.nextBoolean() ? -1L : time + rnd.nextInt();
long lastModified = endTime == -1 ? time + rnd.nextInt() : endTime;
String name = GenericMessageTester.randomString(rnd);
JobID jid = GenericMessageTester.randomJobId(rnd);
JobStatus status = GenericMessageTester.randomJobStatus(rnd);
JobDetails msg1 =
new JobDetails(
jid,
name,
time,
endTime,
endTime - time,
status,
lastModified,
numVerticesPerState,
numTotal);
JobDetails msg2 =
new JobDetails(
jid,
name,
time,
endTime,
endTime - time,
status,
lastModified,
numVerticesPerState,
numTotal);
GenericMessageTester.testMessageInstances(msg1, msg2);
}
@Test
void testMultipleJobDetails() throws Exception {
final Random rnd = new Random();
GenericMessageTester.testMessageInstance(new MultipleJobsDetails(randomJobDetails(rnd)));
}
private Collection<JobDetails> randomJobDetails(Random rnd) {
final JobDetails[] details = new JobDetails[rnd.nextInt(10)];
for (int k = 0; k < details.length; k++) {
int[] numVerticesPerState = new int[ExecutionState.values().length];
int numTotal = 0;
for (int i = 0; i < numVerticesPerState.length; i++) {
int count = rnd.nextInt(55);
numVerticesPerState[i] = count;
numTotal += count;
}
long time = rnd.nextLong();
long endTime = rnd.nextBoolean() ? -1L : time + rnd.nextInt();
long lastModified = endTime == -1 ? time + rnd.nextInt() : endTime;
String name = new GenericMessageTester.StringInstantiator().instantiate(rnd);
JobID jid = new JobID();
JobStatus status = JobStatus.values()[rnd.nextInt(JobStatus.values().length)];
details[k] =
new JobDetails(
jid,
name,
time,
endTime,
endTime - time,
status,
lastModified,
numVerticesPerState,
numTotal);
}
return Arrays.asList(details);
}
}
| WebMonitorMessagesTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/AttributeAccessorSupportTests.java | {
"start": 960,
"end": 2927
} | class ____ {
private static final String NAME = "name";
private static final String VALUE = "value";
private final AttributeAccessor attributeAccessor = new SimpleAttributeAccessorSupport();
@Test
void setAndGet() {
this.attributeAccessor.setAttribute(NAME, VALUE);
assertThat(this.attributeAccessor.getAttribute(NAME)).isEqualTo(VALUE);
}
@Test
void setAndHas() {
assertThat(this.attributeAccessor.hasAttribute(NAME)).isFalse();
this.attributeAccessor.setAttribute(NAME, VALUE);
assertThat(this.attributeAccessor.hasAttribute(NAME)).isTrue();
}
@Test
void computeAttribute() {
AtomicInteger atomicInteger = new AtomicInteger();
Function<String, String> computeFunction = name -> "computed-" + atomicInteger.incrementAndGet();
assertThat(this.attributeAccessor.hasAttribute(NAME)).isFalse();
this.attributeAccessor.computeAttribute(NAME, computeFunction);
assertThat(this.attributeAccessor.getAttribute(NAME)).isEqualTo("computed-1");
this.attributeAccessor.computeAttribute(NAME, computeFunction);
assertThat(this.attributeAccessor.getAttribute(NAME)).isEqualTo("computed-1");
this.attributeAccessor.removeAttribute(NAME);
assertThat(this.attributeAccessor.hasAttribute(NAME)).isFalse();
this.attributeAccessor.computeAttribute(NAME, computeFunction);
assertThat(this.attributeAccessor.getAttribute(NAME)).isEqualTo("computed-2");
}
@Test
void remove() {
assertThat(this.attributeAccessor.hasAttribute(NAME)).isFalse();
this.attributeAccessor.setAttribute(NAME, VALUE);
assertThat(this.attributeAccessor.removeAttribute(NAME)).isEqualTo(VALUE);
assertThat(this.attributeAccessor.hasAttribute(NAME)).isFalse();
}
@Test
void attributeNames() {
this.attributeAccessor.setAttribute(NAME, VALUE);
this.attributeAccessor.setAttribute("abc", "123");
assertThat(this.attributeAccessor.attributeNames()).contains("abc", NAME);
}
@SuppressWarnings("serial")
private static | AttributeAccessorSupportTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/exc/JacksonExceptionSerializationTest.java | {
"start": 362,
"end": 1308
} | class ____
{
/*
/**********************************************************************
/* Test methods
/**********************************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#3244]: StackOverflow for basic JsonProcessingException?
@Test
public void testIssue3244() throws Exception {
JacksonException e = null;
try {
MAPPER.readValue("{ foo ", Map.class);
fail("Should not pass");
} catch (JacksonException e0) {
e = e0;
}
String json = MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(e);
//System.err.println("JSON: "+json);
// Could try proper validation, but for now just ensure we won't crash
assertNotNull(json);
JsonNode n = MAPPER.readTree(json);
assertTrue(n.isObject());
}
}
| JacksonExceptionSerializationTest |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/http/fileupload/MultipartFormUploadTest.java | {
"start": 1473,
"end": 4553
} | class ____ extends HttpTestBase {
@ClassRule
public static TemporaryFolder testFolder = new TemporaryFolder();
private VertxInternal vertx;
@Before
public void setUp() throws Exception {
super.setUp();
vertx = (VertxInternal) Vertx.vertx();
}
@Test
public void testSimpleAttribute() throws Exception {
Buffer result = Buffer.buffer();
ContextInternal context = vertx.getOrCreateContext();
ClientMultipartFormUpload upload = new ClientMultipartFormUpload(context, (ClientMultipartFormImpl) ClientForm.form().attribute("foo", "bar"), false, HttpPostRequestEncoder.EncoderMode.RFC1738);
upload.endHandler(v -> {
assertEquals("foo=bar", result.toString());
testComplete();
});
upload.handler(result::appendBuffer);
upload.resume();
context.runOnContext(v -> upload.pump());
}
@Test
public void testFileUploadEventLoopContext() throws Exception {
testFileUpload(vertx.createEventLoopContext(), false);
}
@Test
public void testFileUploadWorkerContext() throws Exception {
testFileUpload(vertx.createWorkerContext(), false);
}
@Test
public void testFileUploadVirtualThreadContext() throws Exception {
assumeTrue(vertx.isVirtualThreadAvailable());
testFileUpload(vertx.createVirtualThreadContext(), false);
}
@Test
public void testFileUploadPausedEventLoopContext() throws Exception {
testFileUpload(vertx.createEventLoopContext(), true);
}
@Test
public void testFileUploadPausedWorkerContext() throws Exception {
testFileUpload(vertx.createWorkerContext(), true);
}
@Test
public void testFileUploadPausedVirtualThreadContext() throws Exception {
assumeTrue(vertx.isVirtualThreadAvailable());
testFileUpload(vertx.createVirtualThreadContext(), true);
}
private void testFileUpload(ContextInternal context, boolean paused) throws Exception {
File file = testFolder.newFile();
Files.write(file.toPath(), TestUtils.randomByteArray(32 * 1024));
String filename = file.getName();
String pathname = file.getAbsolutePath();
context.runOnContext(v1 -> {
try {
ClientMultipartFormUpload upload = new ClientMultipartFormUpload(context, (ClientMultipartFormImpl) ClientMultipartForm
.multipartForm()
.textFileUpload("the-file", filename, "text/plain", pathname)
, true, HttpPostRequestEncoder.EncoderMode.RFC1738);
List<Buffer> buffers = Collections.synchronizedList(new ArrayList<>());
AtomicInteger end = new AtomicInteger();
upload.endHandler(v2 -> {
assertEquals(0, end.getAndIncrement());
assertFalse(buffers.isEmpty());
testComplete();
});
upload.handler(buffer -> {
assertEquals(0, end.get());
buffers.add(buffer);
});
if (!paused) {
upload.resume();
}
upload.pump();
if (paused) {
context.runOnContext(v3 -> upload.resume());
}
} catch (Exception e) {
fail(e);
}
});
}
}
| MultipartFormUploadTest |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConditionNode.java | {
"start": 570,
"end": 1219
} | class ____ extends StatementNode {
/* ---- begin tree structure ---- */
private ExpressionNode conditionNode;
private BlockNode blockNode;
public void setConditionNode(ExpressionNode conditionNode) {
this.conditionNode = conditionNode;
}
public ExpressionNode getConditionNode() {
return conditionNode;
}
public void setBlockNode(BlockNode blockNode) {
this.blockNode = blockNode;
}
public BlockNode getBlockNode() {
return blockNode;
}
/* ---- end tree structure ---- */
public ConditionNode(Location location) {
super(location);
}
}
| ConditionNode |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/serializer/PropertyFilter.java | {
"start": 701,
"end": 1051
} | interface ____ extends SerializeFilter {
/**
* @param object the owner of the property
* @param name the name of the property
* @param value the value of the property
* @return true if the property will be included, false if to be filtered out
*/
boolean apply(Object object, String name, Object value);
}
| PropertyFilter |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/search/ReduceSearchPhaseException.java | {
"start": 851,
"end": 1540
} | class ____ extends SearchPhaseExecutionException {
public ReduceSearchPhaseException(String phaseName, String msg, Throwable cause, ShardSearchFailure[] shardFailures) {
super(phaseName, "[reduce] " + msg, cause, shardFailures);
}
public ReduceSearchPhaseException(StreamInput in) throws IOException {
super(in);
}
@Override
public RestStatus status() {
final ShardSearchFailure[] shardFailures = shardFailures();
if (shardFailures.length == 0) {
return getCause() == null ? RestStatus.INTERNAL_SERVER_ERROR : ExceptionsHelper.status(getCause());
}
return super.status();
}
}
| ReduceSearchPhaseException |
java | apache__camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedSchedulePollConsumerMBean.java | {
"start": 973,
"end": 4681
} | interface ____ extends ManagedConsumerMBean {
@ManagedAttribute(description = "Scheduled Delay")
long getDelay();
@ManagedAttribute(description = "Scheduled Delay")
void setDelay(long delay);
@ManagedAttribute(description = "Scheduled Initial Delay")
long getInitialDelay();
@ManagedAttribute(description = "Scheduled Initial Delay")
void setInitialDelay(long initialDelay);
@ManagedAttribute(description = "Scheduled Fixed Delay")
boolean isUseFixedDelay();
@ManagedAttribute(description = "Scheduled Fixed Delay")
void setUseFixedDelay(boolean useFixedDelay);
@ManagedAttribute(description = "Scheduled Greedy")
boolean isGreedy();
@ManagedAttribute(description = "Scheduled Greedy")
void setGreedy(boolean greedy);
@ManagedAttribute(description = "If the polling consumer did not poll any files, you can enable this option to send an empty message (no body) instead")
boolean isSendEmptyMessageWhenIdle();
@ManagedAttribute(description = "If the polling consumer did not poll any files, you can enable this option to send an empty message (no body) instead")
void setSendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle);
@ManagedAttribute(description = "The consumer logs a start/complete log line when it polls. This option allows you to configure the logging level for that.")
String getRunningLoggingLevel();
@ManagedAttribute(description = "The consumer logs a start/complete log line when it polls. This option allows you to configure the logging level for that.")
void setRunningLoggingLevel(String runningLoggingLevel);
@ManagedAttribute(description = "Scheduled TimeUnit")
String getTimeUnit();
@ManagedAttribute(description = "Scheduled TimeUnit")
void setTimeUnit(String timeUnit);
@ManagedAttribute(description = "Is the scheduler currently polling")
boolean isPolling();
@ManagedAttribute(description = "Is the scheduler started")
boolean isSchedulerStarted();
@ManagedOperation(description = "Starts the scheduler")
void startScheduler();
@ManagedAttribute(description = "Scheduler classname")
String getSchedulerClassName();
@ManagedAttribute(description = "Backoff multiplier")
int getBackoffMultiplier();
@ManagedAttribute(description = "Backoff idle threshold")
int getBackoffIdleThreshold();
@ManagedAttribute(description = "Backoff error threshold")
int getBackoffErrorThreshold();
@ManagedAttribute(description = "Current backoff counter")
int getBackoffCounter();
@ManagedAttribute(description = "Repeat count")
long getRepeatCount();
@ManagedAttribute(description = "Whether a first pool attempt has been done (also if the consumer has been restarted)")
boolean isFirstPollDone();
@ManagedAttribute(description = "Whether the consumer is ready to handle incoming traffic (used for readiness health-check)")
boolean isConsumerReady();
@ManagedAttribute(description = "Total number of polls run")
long getCounter();
@ManagedAttribute(description = "Error counter. If the counter is > 0 that means the consumer failed polling for the last N number of times."
+ " When the consumer is successfully again, then the error counter resets to zero.")
long getErrorCounter();
@ManagedAttribute(description = "Success counter. If the success is > 0 that means the consumer succeeded polling for the last N number of times."
+ " When the consumer is failing again, then the success counter resets to zero.")
long getSuccessCounter();
}
| ManagedSchedulePollConsumerMBean |
java | grpc__grpc-java | benchmarks/src/generated/main/grpc/io/grpc/benchmarks/proto/ReportQpsScenarioServiceGrpc.java | {
"start": 5636,
"end": 5744
} | class ____ the server implementation of the service ReportQpsScenarioService.
*/
public static abstract | for |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/jdbc/SqlConfig.java | {
"start": 8763,
"end": 11918
} | enum ____ {
/**
* Indicates that the <em>default</em> transaction mode should be used.
* <p>The meaning of <em>default</em> depends on the context in which
* {@code @SqlConfig} is declared:
* <ul>
* <li>If {@code @SqlConfig} is declared <strong>only</strong> locally,
* the default transaction mode is {@link #INFERRED}.</li>
* <li>If {@code @SqlConfig} is declared globally, the default transaction
* mode is {@link #INFERRED}.</li>
* <li>If {@code @SqlConfig} is declared globally <strong>and</strong>
* locally, the default transaction mode for the local declaration is
* inherited from the global declaration.</li>
* </ul>
*/
DEFAULT,
/**
* Indicates that the transaction mode to use when executing SQL
* scripts should be <em>inferred</em> using the rules listed below.
* In the context of these rules, the term "<em>available</em>"
* means that the bean for the data source or transaction manager
* is either explicitly specified via a corresponding annotation
* attribute in {@code @SqlConfig} or discoverable via conventions. See
* {@link org.springframework.test.context.transaction.TestContextTransactionUtils TestContextTransactionUtils}
* for details on the conventions used to discover such beans in
* the {@code ApplicationContext}.
*
* <h4>Inference Rules</h4>
* <ol>
* <li>If neither a transaction manager nor a data source is
* available, an exception will be thrown.
* <li>If a transaction manager is not available but a data source
* is available, SQL scripts will be executed directly against the
* data source without a transaction.
* <li>If a transaction manager is available:
* <ul>
* <li>If a data source is not available, an attempt will be made
* to retrieve it from the transaction manager by using reflection
* to invoke a public method named {@code getDataSource()} on the
* transaction manager. If the attempt fails, an exception will be
* thrown.
* <li>Using the resolved transaction manager and data source, SQL
* scripts will be executed within an existing transaction if
* present; otherwise, scripts will be executed in a new transaction
* that will be immediately committed. An <em>existing</em>
* transaction will typically be managed by the
* {@link org.springframework.test.context.transaction.TransactionalTestExecutionListener TransactionalTestExecutionListener}.
* </ul>
* </ol>
* @see #ISOLATED
* @see org.springframework.test.context.transaction.TestContextTransactionUtils#retrieveDataSource
* @see org.springframework.test.context.transaction.TestContextTransactionUtils#retrieveTransactionManager
*/
INFERRED,
/**
* Indicates that SQL scripts should always be executed in a new,
* <em>isolated</em> transaction that will be immediately committed.
* <p>In contrast to {@link #INFERRED}, this mode requires the
* presence of a transaction manager <strong>and</strong> a data
* source.
*/
ISOLATED
}
/**
* Enumeration of <em>modes</em> that dictate how errors are handled while
* executing SQL statements.
*/
| TransactionMode |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java | {
"start": 1808,
"end": 6034
} | class ____ extends SocketIOWithTimeout {
ReadableByteChannel channel;
Reader(ReadableByteChannel channel, long timeout) throws IOException {
super((SelectableChannel)channel, timeout);
this.channel = channel;
}
@Override
int performIO(ByteBuffer buf) throws IOException {
return channel.read(buf);
}
}
/**
* Create a new input stream with the given timeout. If the timeout
* is zero, it will be treated as infinite timeout. The socket's
* channel will be configured to be non-blocking.
*
* @param channel
* Channel for reading, should also be a {@link SelectableChannel}.
* The channel will be configured to be non-blocking.
* @param timeout timeout in milliseconds. must not be negative.
* @throws IOException raised on errors performing I/O.
*/
public SocketInputStream(ReadableByteChannel channel, long timeout)
throws IOException {
SocketIOWithTimeout.checkChannelValidity(channel);
reader = new Reader(channel, timeout);
}
/**
* Same as SocketInputStream(socket.getChannel(), timeout): <br><br>
*
* Create a new input stream with the given timeout. If the timeout
* is zero, it will be treated as infinite timeout. The socket's
* channel will be configured to be non-blocking.
*
* @see SocketInputStream#SocketInputStream(ReadableByteChannel, long)
*
* @param socket should have a channel associated with it.
* @param timeout timeout timeout in milliseconds. must not be negative.
* @throws IOException raised on errors performing I/O.
*/
public SocketInputStream(Socket socket, long timeout)
throws IOException {
this(socket.getChannel(), timeout);
}
/**
* Same as SocketInputStream(socket.getChannel(), socket.getSoTimeout())
* :<br><br>
*
* Create a new input stream with the given timeout. If the timeout
* is zero, it will be treated as infinite timeout. The socket's
* channel will be configured to be non-blocking.
* @see SocketInputStream#SocketInputStream(ReadableByteChannel, long)
*
* @param socket should have a channel associated with it.
* @throws IOException raised on errors performing I/O.
*/
public SocketInputStream(Socket socket) throws IOException {
this(socket.getChannel(), socket.getSoTimeout());
}
@Override
public int read() throws IOException {
/* Allocation can be removed if required.
* probably no need to optimize or encourage single byte read.
*/
byte[] buf = new byte[1];
int ret = read(buf, 0, 1);
if (ret > 0) {
return (int)(buf[0] & 0xff);
}
if (ret != -1) {
// unexpected
throw new IOException("Could not read from stream");
}
return ret;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
return read(ByteBuffer.wrap(b, off, len));
}
@Override
public synchronized void close() throws IOException {
/* close the channel since Socket.getInputStream().close()
* closes the socket.
*/
reader.channel.close();
reader.close();
}
/**
* @return Returns underlying channel used by inputstream.
* This is useful in certain cases like channel for
* {@link FileChannel#transferFrom(ReadableByteChannel, long, long)}.
*/
public ReadableByteChannel getChannel() {
return reader.channel;
}
//ReadableByteChannel interface
@Override
public boolean isOpen() {
return reader.isOpen();
}
@Override
public int read(ByteBuffer dst) throws IOException {
return reader.doIO(dst, SelectionKey.OP_READ);
}
/**
* waits for the underlying channel to be ready for reading.
* The timeout specified for this stream applies to this wait.
*
* @throws SocketTimeoutException
* if select on the channel times out.
* @throws IOException
* if any other I/O error occurs.
*/
public void waitForReadable() throws IOException {
reader.waitForIO(SelectionKey.OP_READ);
}
public void setTimeout(long timeoutMs) {
reader.setTimeout(timeoutMs);
}
}
| Reader |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/conditions/Conditions_assertIs_Test.java | {
"start": 1408,
"end": 2224
} | class ____ extends ConditionsBaseTest {
@Test
void should_throw_error_if_Condition_is_null() {
assertThatNullPointerException().isThrownBy(() -> conditions.assertIs(someInfo(), actual, null))
.withMessage("The condition to evaluate should not be null");
}
@Test
void should_pass_if_Condition_is_met() {
condition.shouldMatch(true);
conditions.assertIs(someInfo(), actual, condition);
}
@Test
void should_fail_if_Condition_is_not_met() {
condition.shouldMatch(false);
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> conditions.assertIs(info, actual, condition));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBe(actual, condition));
}
}
| Conditions_assertIs_Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/store/AbstractFSNodeStore.java | {
"start": 1645,
"end": 3534
} | class ____<M> {
protected static final Logger LOG =
LoggerFactory.getLogger(AbstractFSNodeStore.class);
private StoreType storeType;
private FSDataOutputStream editlogOs;
private Path editLogPath;
private int replication;
private StoreSchema schema;
protected M manager;
protected Path fsWorkingPath;
protected FileSystem fs;
public AbstractFSNodeStore(StoreType storeType) {
this.storeType = storeType;
}
protected void initStore(Configuration conf, Path fsStorePath,
StoreSchema schma, M mgr) throws IOException {
this.schema = schma;
this.fsWorkingPath = fsStorePath;
this.manager = mgr;
initFileSystem(conf);
initNodeStoreRootDirectory(conf);
this.replication = conf.getInt(YarnConfiguration.FS_STORE_FILE_REPLICATION,
YarnConfiguration.DEFAULT_FS_STORE_FILE_REPLICATION);
}
private void initNodeStoreRootDirectory(Configuration conf) throws IOException {
// mkdir of root dir path with retry logic
int maxRetries = conf.getInt(YarnConfiguration.NODE_STORE_ROOT_DIR_NUM_RETRIES,
YarnConfiguration.NODE_STORE_ROOT_DIR_NUM_DEFAULT_RETRIES);
int retryCount = 0;
boolean success = false;
while (!success && retryCount <= maxRetries) {
try {
success = fs.mkdirs(fsWorkingPath);
} catch (IOException e) {
retryCount++;
if (retryCount > maxRetries) {
throw e;
}
try {
Thread.sleep(conf.getInt(YarnConfiguration.NODE_STORE_ROOT_DIR_RETRY_INTERVAL,
YarnConfiguration.NODE_STORE_ROOT_DIR_RETRY_DEFAULT_INTERVAL));
} catch (InterruptedException ie) {
throw new RuntimeException(ie);
}
}
}
LOG.info("Created store directory :" + fsWorkingPath);
}
/**
* Filesystem store schema define the log name and mirror name.
*/
public static | AbstractFSNodeStore |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java | {
"start": 3739,
"end": 16126
} | class ____ extends FileStore {
public long usableSpace;
private final String desc;
MockFileStore(String desc) {
this.desc = desc;
}
@Override
public String type() {
return "mock";
}
@Override
public String name() {
return desc;
}
@Override
public String toString() {
return desc;
}
@Override
public boolean isReadOnly() {
return false;
}
@Override
public long getTotalSpace() throws IOException {
return usableSpace * 3;
}
@Override
public long getUsableSpace() throws IOException {
return usableSpace;
}
@Override
public long getUnallocatedSpace() throws IOException {
return usableSpace * 2;
}
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> type) {
return false;
}
@Override
public boolean supportsFileAttributeView(String name) {
return false;
}
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> type) {
return null;
}
@Override
public Object getAttribute(String attribute) throws IOException {
return null;
}
}
static void createFakeShard(ShardPath path) throws IOException {
Files.createDirectories(path.resolveIndex().getParent());
}
public void testSelectNewPathForShard() throws Exception {
Path path = PathUtils.get(createTempDir().toString());
// Use 2 data paths:
String[] paths = new String[] { path.resolve("a").toString(), path.resolve("b").toString() };
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
.putList(Environment.PATH_DATA_SETTING.getKey(), paths)
.build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
// Make sure all our mocking above actually worked:
DataPath[] dataPaths = nodeEnv.dataPaths();
assertEquals(2, dataPaths.length);
assertEquals("mocka", dataPaths[0].fileStore.name());
assertEquals("mockb", dataPaths[1].fileStore.name());
// Path a has lots of free space, but b has little, so new shard should go to a:
aFileStore.usableSpace = 100000;
bFileStore.usableSpace = 1000;
ShardId shardId = new ShardId("index", "_na_", 0);
ShardPath result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.<Path, Integer>emptyMap());
assertTrue(result.getDataPath().toString().contains(aPathPart));
// Test the reverse: b has lots of free space, but a has little, so new shard should go to b:
aFileStore.usableSpace = 1000;
bFileStore.usableSpace = 100000;
shardId = new ShardId("index", "_na_", 0);
result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.<Path, Integer>emptyMap());
assertTrue(result.getDataPath().toString().contains(bPathPart));
// Now a and be have equal usable space; we allocate two shards to the node, and each should go to different paths:
aFileStore.usableSpace = 100000;
bFileStore.usableSpace = 100000;
Map<Path, Integer> dataPathToShardCount = new HashMap<>();
ShardPath result1 = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, dataPathToShardCount);
createFakeShard(result1);
dataPathToShardCount.put(NodeEnvironment.shardStatePathToDataPath(result1.getDataPath()), 1);
ShardPath result2 = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, dataPathToShardCount);
createFakeShard(result2);
// #11122: this was the original failure: on a node with 2 disks that have nearly equal
// free space, we would always allocate all N incoming shards to the one path that
// had the most free space, never using the other drive unless new shards arrive
// after the first shards started using storage:
assertNotEquals(result1.getDataPath(), result2.getDataPath());
nodeEnv.close();
}
public void testSelectNewPathForShardEvenly() throws Exception {
Path path = PathUtils.get(createTempDir().toString());
// Use 2 data paths:
String[] paths = new String[] { path.resolve("a").toString(), path.resolve("b").toString() };
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
.putList(Environment.PATH_DATA_SETTING.getKey(), paths)
.build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
// Make sure all our mocking above actually worked:
DataPath[] dataPaths = nodeEnv.dataPaths();
assertEquals(2, dataPaths.length);
assertEquals("mocka", dataPaths[0].fileStore.name());
assertEquals("mockb", dataPaths[1].fileStore.name());
// Path a has lots of free space, but b has little, so new shard should go to a:
aFileStore.usableSpace = 100000;
bFileStore.usableSpace = 10000;
ShardId shardId = new ShardId("index", "uid1", 0);
ShardPath result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.<Path, Integer>emptyMap());
createFakeShard(result);
// First shard should go to a
assertThat(result.getDataPath().toString(), containsString(aPathPart));
shardId = new ShardId("index", "uid1", 1);
result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.<Path, Integer>emptyMap());
createFakeShard(result);
// Second shard should go to b
assertThat(result.getDataPath().toString(), containsString(bPathPart));
Map<Path, Integer> dataPathToShardCount = new HashMap<>();
shardId = new ShardId("index2", "uid2", 0);
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(
"index2",
Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).build()
);
ShardPath result1 = ShardPath.selectNewPathForShard(nodeEnv, shardId, idxSettings, 100, dataPathToShardCount);
createFakeShard(result1);
dataPathToShardCount.put(NodeEnvironment.shardStatePathToDataPath(result1.getDataPath()), 1);
shardId = new ShardId("index2", "uid2", 1);
ShardPath result2 = ShardPath.selectNewPathForShard(nodeEnv, shardId, idxSettings, 100, dataPathToShardCount);
createFakeShard(result2);
dataPathToShardCount.put(NodeEnvironment.shardStatePathToDataPath(result2.getDataPath()), 1);
shardId = new ShardId("index2", "uid2", 2);
ShardPath result3 = ShardPath.selectNewPathForShard(nodeEnv, shardId, idxSettings, 100, dataPathToShardCount);
createFakeShard(result3);
// 2 shards go to 'a' and 1 to 'b'
assertThat(result1.getDataPath().toString(), containsString(aPathPart));
assertThat(result2.getDataPath().toString(), containsString(bPathPart));
assertThat(result3.getDataPath().toString(), containsString(aPathPart));
nodeEnv.close();
}
public void testGettingPathWithMostFreeSpace() throws Exception {
Path path = PathUtils.get(createTempDir().toString());
// Use 2 data paths:
String[] paths = new String[] { path.resolve("a").toString(), path.resolve("b").toString() };
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
.putList(Environment.PATH_DATA_SETTING.getKey(), paths)
.build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
aFileStore.usableSpace = 100000;
bFileStore.usableSpace = 1000;
assertThat(ShardPath.getPathWithMostFreeSpace(nodeEnv), equalTo(nodeEnv.dataPaths()[0]));
aFileStore.usableSpace = 10000;
bFileStore.usableSpace = 20000;
assertThat(ShardPath.getPathWithMostFreeSpace(nodeEnv), equalTo(nodeEnv.dataPaths()[1]));
nodeEnv.close();
}
public void testTieBreakWithMostShards() throws Exception {
Path path = PathUtils.get(createTempDir().toString());
// Use 2 data paths:
String[] paths = new String[] { path.resolve("a").toString(), path.resolve("b").toString() };
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
.putList(Environment.PATH_DATA_SETTING.getKey(), paths)
.build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings));
// Make sure all our mocking above actually worked:
DataPath[] dataPaths = nodeEnv.dataPaths();
assertEquals(2, dataPaths.length);
assertEquals("mocka", dataPaths[0].fileStore.name());
assertEquals("mockb", dataPaths[1].fileStore.name());
// Path a has lots of free space, but b has little, so new shard should go to a:
aFileStore.usableSpace = 100000;
bFileStore.usableSpace = 10000;
Map<Path, Integer> dataPathToShardCount = new HashMap<>();
ShardId shardId = new ShardId("index", "uid1", 0);
ShardPath result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, dataPathToShardCount);
createFakeShard(result);
// First shard should go to a
assertThat(result.getDataPath().toString(), containsString(aPathPart));
dataPathToShardCount.compute(NodeEnvironment.shardStatePathToDataPath(result.getDataPath()), (k, v) -> v == null ? 1 : v + 1);
shardId = new ShardId("index", "uid1", 1);
result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, dataPathToShardCount);
createFakeShard(result);
// Second shard should go to b
assertThat(result.getDataPath().toString(), containsString(bPathPart));
dataPathToShardCount.compute(NodeEnvironment.shardStatePathToDataPath(result.getDataPath()), (k, v) -> v == null ? 1 : v + 1);
shardId = new ShardId("index2", "uid3", 0);
result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, dataPathToShardCount);
createFakeShard(result);
// Shard for new index should go to a
assertThat(result.getDataPath().toString(), containsString(aPathPart));
dataPathToShardCount.compute(NodeEnvironment.shardStatePathToDataPath(result.getDataPath()), (k, v) -> v == null ? 1 : v + 1);
shardId = new ShardId("index2", "uid2", 0);
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(
"index2",
Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).build()
);
ShardPath result1 = ShardPath.selectNewPathForShard(nodeEnv, shardId, idxSettings, 100, dataPathToShardCount);
createFakeShard(result1);
dataPathToShardCount.compute(NodeEnvironment.shardStatePathToDataPath(result1.getDataPath()), (k, v) -> v == null ? 1 : v + 1);
shardId = new ShardId("index2", "uid2", 1);
ShardPath result2 = ShardPath.selectNewPathForShard(nodeEnv, shardId, idxSettings, 100, dataPathToShardCount);
createFakeShard(result2);
dataPathToShardCount.compute(NodeEnvironment.shardStatePathToDataPath(result2.getDataPath()), (k, v) -> v == null ? 1 : v + 1);
shardId = new ShardId("index2", "uid2", 2);
ShardPath result3 = ShardPath.selectNewPathForShard(nodeEnv, shardId, idxSettings, 100, dataPathToShardCount);
createFakeShard(result3);
// 2 shards go to 'b' and 1 to 'a'
assertThat(result1.getDataPath().toString(), containsString(bPathPart));
assertThat(result2.getDataPath().toString(), containsString(aPathPart));
assertThat(result3.getDataPath().toString(), containsString(bPathPart));
nodeEnv.close();
}
}
| MockFileStore |
java | apache__camel | components/camel-huawei/camel-huaweicloud-dms/src/test/java/org/apache/camel/component/huaweicloud/dms/CreateInstanceKafkaFunctional1Test.java | {
"start": 1360,
"end": 5576
} | class ____ extends CamelTestSupport {
private static final String ACCESS_KEY = "replace_this_with_access_key";
private static final String SECRET_KEY = "replace_this_with_secret_key";
private static final String PROJECT_ID = "replace_this_with_project_id";
private static final String REGION = "replace_this_with_region";
// new Kafka instance options: https://support.huaweicloud.com/en-us/api-kafka/kafka-api-180514002.html
private static final String NAME = "replace_this_with_name";
private static final String ENGINE_VERSION = "replace_this_with_engine_version";
private static final String SPECIFICATION = "replace_this_with_specification";
private static final String STORAGE_SPACE = "replace_this_with_storage_space";
private static final String PARTITION_NUM = "replace_this_with_partition_num";
private static final String VPC_ID = "replace_this_with_vpc_id";
private static final String SECURITY_GROUP_ID = "replace_this_with_security_group_id";
private static final String SUBNET_ID = "replace_this_with_subnet_id";
private static final String AVAILABLE_ZONE = "replace_this_with_available_zone";
private static final String PRODUCT_ID = "replace_this_with_product_id";
private static final String KAFKA_MANAGER_USER = "replace_this_with_kafka_manager_user";
private static final String KAFKA_MANAGER_PASSWORD = "replace_this_with_kafka_manager_password";
private static final String STORAGE_SPEC_CODE = "replace_this_with_storage_spec_code";
@BindToRegistry("serviceKeys")
ServiceKeys serviceKeys = new ServiceKeys(ACCESS_KEY, SECRET_KEY);
@BindToRegistry("availableZones")
String availableZones = AVAILABLE_ZONE;
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:operation")
.to("hwcloud-dms:createInstance?" +
"serviceKeys=#serviceKeys" +
"&projectId=" + PROJECT_ID +
"®ion=" + REGION +
"&ignoreSslVerification=true" +
"&engine=kafka" +
"&name=" + NAME +
"&engineVersion=" + ENGINE_VERSION +
"&specification=" + SPECIFICATION +
"&storageSpace=" + STORAGE_SPACE +
"&partitionNum=" + PARTITION_NUM +
"&vpcId=" + VPC_ID +
"&securityGroupId=" + SECURITY_GROUP_ID +
"&subnetId=" + SUBNET_ID +
"&availableZones=#availableZones" +
"&productId=" + PRODUCT_ID +
"&kafkaManagerUser=" + KAFKA_MANAGER_USER +
"&kafkaManagerPassword=" + KAFKA_MANAGER_PASSWORD +
"&storageSpecCode=" + STORAGE_SPEC_CODE)
.log("Operation successful")
.to("log:LOG?showAll=true")
.to("mock:operation_result");
}
};
}
/**
* The following test cases should be manually enabled to perform test against the actual HuaweiCloud DMS server
* with real user credentials. To perform this test, manually comment out the @Ignore annotation and enter relevant
* service parameters in the placeholders above (static variables of this test class)
*
* @throws Exception
*/
@Disabled("Manually enable this once you configure the parameters in the placeholders above")
@Test
public void testOperation() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:operation_result");
mock.expectedMinimumMessageCount(1);
template.sendBody("direct:operation", null);
Exchange responseExchange = mock.getExchanges().get(0);
mock.assertIsSatisfied();
assertNotNull(responseExchange.getIn().getBody(String.class));
assertTrue(responseExchange.getIn().getBody(String.class).length() > 0);
}
}
| CreateInstanceKafkaFunctional1Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java | {
"start": 4893,
"end": 4950
} | class ____ {
@AutoBuilder
| MyClass |
java | apache__flink | flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Predicates.java | {
"start": 8064,
"end": 8211
} | class ____ cannot be null");
}
if (fqClassName.trim().isEmpty()) {
throw new IllegalArgumentException("Fully qualified | name |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3983PluginResolutionFromProfileReposTest.java | {
"start": 1043,
"end": 1654
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test that plugins can be resolved from remote plugin repositories defined by (active) profiles in the POM.
*
* @throws Exception in case of failure
*/
@Test
public void testitFromPom() throws Exception {
// requiresMavenVersion("[2.0,3.0-alpha-1),[3.0-alpha-3,)");
File testDir = extractResources("/mng-3983/test-1");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setForkJvm(true); // Don't lock up plugin files in | MavenITmng3983PluginResolutionFromProfileReposTest |
java | spring-projects__spring-boot | module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/actuate/web/servlet/EndpointRequest.java | {
"start": 15327,
"end": 18374
} | class ____ extends AbstractRequestMatcher {
private final WebServerNamespace webServerNamespace;
private final List<Object> endpoints;
private final @Nullable HttpMethod httpMethod;
AdditionalPathsEndpointRequestMatcher(WebServerNamespace webServerNamespace, String... endpoints) {
this(webServerNamespace, Arrays.asList((Object[]) endpoints), null);
}
AdditionalPathsEndpointRequestMatcher(WebServerNamespace webServerNamespace, Class<?>... endpoints) {
this(webServerNamespace, Arrays.asList((Object[]) endpoints), null);
}
private AdditionalPathsEndpointRequestMatcher(WebServerNamespace webServerNamespace, List<Object> endpoints,
@Nullable HttpMethod httpMethod) {
Assert.notNull(webServerNamespace, "'webServerNamespace' must not be null");
Assert.notNull(endpoints, "'endpoints' must not be null");
Assert.notEmpty(endpoints, "'endpoints' must not be empty");
this.webServerNamespace = webServerNamespace;
this.endpoints = endpoints;
this.httpMethod = httpMethod;
}
/**
* Restricts the matcher to only consider requests with a particular HTTP method.
* @param httpMethod the HTTP method to include
* @return a copy of the matcher further restricted to only match requests with
* the specified HTTP method
*/
public AdditionalPathsEndpointRequestMatcher withHttpMethod(HttpMethod httpMethod) {
return new AdditionalPathsEndpointRequestMatcher(this.webServerNamespace, this.endpoints, httpMethod);
}
@Override
protected boolean ignoreApplicationContext(WebApplicationContext applicationContext,
ManagementPortType managementPortType) {
return !hasWebServerNamespace(applicationContext, this.webServerNamespace);
}
@Override
protected RequestMatcher createDelegate(WebApplicationContext context,
RequestMatcherFactory requestMatcherFactory) {
PathMappedEndpoints endpoints = context.getBean(PathMappedEndpoints.class);
RequestMatcherProvider matcherProvider = getRequestMatcherProvider(context);
Set<String> paths = this.endpoints.stream()
.filter(Objects::nonNull)
.map(this::getEndpointId)
.flatMap((endpointId) -> streamAdditionalPaths(endpoints, endpointId))
.collect(Collectors.toCollection(LinkedHashSet::new));
List<RequestMatcher> delegateMatchers = getDelegateMatchers(requestMatcherFactory, matcherProvider, paths,
this.httpMethod);
return (!CollectionUtils.isEmpty(delegateMatchers)) ? new OrRequestMatcher(delegateMatchers)
: EMPTY_MATCHER;
}
private Stream<String> streamAdditionalPaths(PathMappedEndpoints pathMappedEndpoints, EndpointId endpointId) {
return pathMappedEndpoints.getAdditionalPaths(this.webServerNamespace, endpointId).stream();
}
@Override
public String toString() {
return String.format("AdditionalPathsEndpointRequestMatcher endpoints=%s, webServerNamespace=%s",
toString(this.endpoints, ""), this.webServerNamespace);
}
}
/**
* Factory used to create a {@link RequestMatcher}.
*/
private static final | AdditionalPathsEndpointRequestMatcher |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/utils/SerializeSecurityConfiguratorTest.java | {
"start": 1587,
"end": 23018
} | class ____ {
@Test
void test() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertTrue(ssm.getAllowedPrefix().contains("java.util.HashMap"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.example.DemoInterface"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.sun.Interface1"));
Assertions.assertTrue(ssm.getDisAllowedPrefix().contains("com.exampletest.DemoInterface"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.sun.Interface2"));
Assertions.assertEquals(AllowClassNotifyListener.DEFAULT_STATUS, ssm.getCheckStatus());
frameworkModel.destroy();
}
@Test
void testStatus1() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setSerializeCheckStatus(SerializeCheckStatus.DISABLE.name());
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertEquals(SerializeCheckStatus.DISABLE, ssm.getCheckStatus());
frameworkModel.destroy();
}
@Test
void testStatus2() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setSerializeCheckStatus(SerializeCheckStatus.WARN.name());
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertEquals(SerializeCheckStatus.WARN, ssm.getCheckStatus());
frameworkModel.destroy();
}
@Test
void testStatus3() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setSerializeCheckStatus(SerializeCheckStatus.STRICT.name());
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertEquals(SerializeCheckStatus.STRICT, ssm.getCheckStatus());
frameworkModel.destroy();
}
@Test
void testStatus4() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_OPEN_CHECK, "false");
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertEquals(SerializeCheckStatus.DISABLE, ssm.getCheckStatus());
SystemPropertyConfigUtils.clearSystemProperty(CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_OPEN_CHECK);
frameworkModel.destroy();
}
@Test
void testStatus5() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_BLOCK_ALL, "true");
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertEquals(SerializeCheckStatus.STRICT, ssm.getCheckStatus());
SystemPropertyConfigUtils.clearSystemProperty(CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_BLOCK_ALL);
frameworkModel.destroy();
}
@Test
void testConfig1() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_ALLOWED_LIST, "test.package1, test.package2, ,");
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertTrue(ssm.getAllowedPrefix().contains("test.package1"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("test.package2"));
SystemPropertyConfigUtils.clearSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_ALLOWED_LIST);
frameworkModel.destroy();
}
@Test
void testConfig2() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_BLOCKED_LIST, "test.package1, test.package2, ,");
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertTrue(ssm.getDisAllowedPrefix().contains("test.package1"));
Assertions.assertTrue(ssm.getDisAllowedPrefix().contains("test.package2"));
SystemPropertyConfigUtils.clearSystemProperty(CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_BLOCK_ALL);
frameworkModel.destroy();
}
@Test
void testConfig3() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_ALLOWED_LIST, "test.package1, test.package2, ,");
SystemPropertyConfigUtils.setSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_BLOCKED_LIST, "test.package1, test.package2, ,");
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertTrue(ssm.getAllowedPrefix().contains("test.package1"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("test.package2"));
SystemPropertyConfigUtils.clearSystemProperty(
CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_ALLOWED_LIST);
SystemPropertyConfigUtils.clearSystemProperty(CommonConstants.DubboProperty.DUBBO_CLASS_DESERIALIZE_BLOCK_ALL);
frameworkModel.destroy();
}
@Test
void testSerializable1() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setCheckSerializable(false);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertFalse(ssm.isCheckSerializable());
frameworkModel.destroy();
}
@Test
void testSerializable2() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
Assertions.assertTrue(ssm.isCheckSerializable());
frameworkModel.destroy();
}
@Test
void testGeneric() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(DemoService4.class);
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.service.DemoService4"));
frameworkModel.destroy();
}
@Test
void testGenericClass() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(UserService.class);
Assertions.assertTrue(ssm.getAllowedPrefix().contains(UserService.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Service.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Params.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(User.class.getName()));
frameworkModel.destroy();
}
@Test
void testRegister1() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(DemoService1.class);
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.service.DemoService1"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo1"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo2"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo3"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo4"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo5"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo6"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo7"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo8"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Simple"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(List.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Set.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Map.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(LinkedList.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Vector.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(HashSet.class.getName()));
frameworkModel.destroy();
}
@Test
void testRegister2() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(DemoService2.class);
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.service.DemoService2"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo1"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo2"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo3"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo4"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo5"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo6"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo7"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Demo8"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.pojo.Simple"));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(List.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Set.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Map.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(LinkedList.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(Vector.class.getName()));
Assertions.assertTrue(ssm.getAllowedPrefix().contains(HashSet.class.getName()));
frameworkModel.destroy();
}
@Test
void testRegister3() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setAutoTrustSerializeClass(false);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(DemoService1.class);
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.service.DemoService1"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo1"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo2"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo3"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo4"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo5"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo6"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo7"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Demo8"));
Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.pojo.Simple"));
frameworkModel.destroy();
}
@Test
void testRegister4() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setTrustSerializeClassLevel(4);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(DemoService3.class);
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.service.deep1.deep2."));
frameworkModel.destroy();
}
@Test
void testRegister5() {
FrameworkModel frameworkModel = new FrameworkModel();
ApplicationModel applicationModel = frameworkModel.newApplication();
ModuleModel moduleModel = applicationModel.newModule();
ApplicationConfig applicationConfig = new ApplicationConfig("Test");
applicationConfig.setTrustSerializeClassLevel(10);
applicationModel.getApplicationConfigManager().setApplication(applicationConfig);
SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
serializeSecurityConfigurator.onAddClassLoader(
moduleModel, Thread.currentThread().getContextClassLoader());
serializeSecurityConfigurator.registerInterface(DemoService3.class);
Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.service.deep1.deep2.deep3.DemoService3"));
frameworkModel.destroy();
}
}
| SerializeSecurityConfiguratorTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/speculate/TestDataStatistics.java | {
"start": 952,
"end": 2656
} | class ____ {
private static final double TOL = 0.001;
@Test
public void testEmptyDataStatistics() throws Exception {
DataStatistics statistics = new DataStatistics();
assertEquals(0, statistics.count(), TOL);
assertEquals(0, statistics.mean(), TOL);
assertEquals(0, statistics.var(), TOL);
assertEquals(0, statistics.std(), TOL);
assertEquals(0, statistics.outlier(1.0f), TOL);
}
@Test
public void testSingleEntryDataStatistics() throws Exception {
DataStatistics statistics = new DataStatistics(17.29);
assertEquals(1, statistics.count(), TOL);
assertEquals(17.29, statistics.mean(), TOL);
assertEquals(0, statistics.var(), TOL);
assertEquals(0, statistics.std(), TOL);
assertEquals(17.29, statistics.outlier(1.0f), TOL);
}
@Test
public void testMultiEntryDataStatistics() throws Exception {
DataStatistics statistics = new DataStatistics();
statistics.add(17);
statistics.add(29);
assertEquals(2, statistics.count(), TOL);
assertEquals(23.0, statistics.mean(), TOL);
assertEquals(36.0, statistics.var(), TOL);
assertEquals(6.0, statistics.std(), TOL);
assertEquals(29.0, statistics.outlier(1.0f), TOL);
}
@Test
public void testUpdateStatistics() throws Exception {
DataStatistics statistics = new DataStatistics(17);
statistics.add(29);
assertEquals(2, statistics.count(), TOL);
assertEquals(23.0, statistics.mean(), TOL);
assertEquals(36.0, statistics.var(), TOL);
statistics.updateStatistics(17, 29);
assertEquals(2, statistics.count(), TOL);
assertEquals(29.0, statistics.mean(), TOL);
assertEquals(0.0, statistics.var(), TOL);
}
}
| TestDataStatistics |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/context/runner/ContextConsumer.java | {
"start": 1089,
"end": 1946
} | interface ____<C extends ApplicationContext> {
/**
* Performs this operation on the supplied {@code context}.
* @param context the application context to consume
* @throws Throwable any exception that might occur in assertions
*/
void accept(C context) throws Throwable;
/**
* Returns a composed {@code ContextConsumer} that performs, in sequence, this
* operation followed by the {@code after} operation.
* @param after the operation to perform after this operation
* @return a composed {@code ContextConsumer} that performs in sequence this operation
* followed by the {@code after} operation
* @since 2.6.0
*/
default ContextConsumer<C> andThen(ContextConsumer<? super C> after) {
Assert.notNull(after, "'after' must not be null");
return (context) -> {
accept(context);
after.accept(context);
};
}
}
| ContextConsumer |
java | spring-projects__spring-security | webauthn/src/main/java/org/springframework/security/web/webauthn/jackson/COSEAlgorithmIdentifierDeserializer.java | {
"start": 1133,
"end": 1705
} | class ____ extends StdDeserializer<COSEAlgorithmIdentifier> {
COSEAlgorithmIdentifierDeserializer() {
super(COSEAlgorithmIdentifier.class);
}
@Override
public @Nullable COSEAlgorithmIdentifier deserialize(JsonParser parser, DeserializationContext ctxt)
throws JacksonException {
Long transportValue = parser.readValueAs(Long.class);
for (COSEAlgorithmIdentifier identifier : COSEAlgorithmIdentifier.values()) {
if (identifier.getValue() == transportValue.longValue()) {
return identifier;
}
}
return null;
}
}
| COSEAlgorithmIdentifierDeserializer |
java | spring-projects__spring-boot | module/spring-boot-restclient/src/test/java/org/springframework/boot/restclient/autoconfigure/service/HttpServiceClientAutoConfigurationTests.java | {
"start": 12857,
"end": 13083
} | class ____ {
@Bean
RestClientCustomizer restClientCustomizer() {
return (builder) -> builder.defaultHeader("customized", "true");
}
}
@Configuration(proxyBeanMethods = false)
static | RestClientCustomizerConfiguration |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/FactoryModuleBuilder.java | {
"start": 1064,
"end": 1246
} | interface ____ methods return the constructed type, or any of its supertypes. The
* method's parameters are the arguments required to build the constructed type.
*
* <pre>public | whose |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/proxy/concrete/ConcreteProxyTest.java | {
"start": 17163,
"end": 17604
} | class ____ {
@Id
private Long id;
@ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST)
private UnionBase union;
public UnionParent() {
}
public UnionParent(Long id, UnionBase union) {
this.id = id;
this.union = union;
}
public UnionBase getUnion() {
return union;
}
}
@Entity(name = "UnionBase")
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
@ConcreteProxy
public static | UnionParent |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/model/BindingGraph.java | {
"start": 14354,
"end": 15083
} | interface ____ extends Edge {
/** The subcomponent factory method element. */
DaggerExecutableElement factoryMethod();
}
/**
* An edge that represents the link between a parent component and a child subcomponent implied by
* a subcomponent creator ({@linkplain dagger.Subcomponent.Builder builder} or {@linkplain
* dagger.Subcomponent.Factory factory}) binding.
*
* <p>The {@linkplain com.google.common.graph.EndpointPair#source() source node} of this edge is a
* {@link Binding} for the subcomponent creator {@link Key} and the {@linkplain
* com.google.common.graph.EndpointPair#target() target node} is a {@link ComponentNode} for the
* child subcomponent.
*/
public | ChildFactoryMethodEdge |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/size/CriteriaSelectSizeCollectionTest.java | {
"start": 1187,
"end": 2431
} | class ____ {
@BeforeEach
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
Customer customer = new Customer( "1", "Phil" );
Alias alias = new Alias( "2", "p" );
customer.addAlias( alias );
entityManager.persist( customer );
}
);
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testSelectCollectionSize(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
CriteriaQuery<Integer> query = criteriaBuilder.createQuery( Integer.class );
Root<Customer> customer = query.from( Customer.class );
Expression<Integer> aliases = criteriaBuilder.size( customer.get( "aliases" ) );
query.select( aliases );
query.where( criteriaBuilder.equal( customer.get( "id" ), "1" ) );
TypedQuery<Integer> tq = entityManager.createQuery( query );
Integer size = tq.getSingleResult();
assertThat( size, is( 1 ) );
}
);
}
@Entity(name = "Customer")
@Table(name = "CUSTOMER_TABLE")
public static | CriteriaSelectSizeCollectionTest |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/FeatureSpecificTestSuiteBuilder.java | {
"start": 5948,
"end": 6115
} | class ____
List<Class<? extends AbstractTester>> testers = getTesters();
TestSuite suite = new TestSuite(name);
for (@SuppressWarnings("rawtypes") // | literals |
java | quarkusio__quarkus | integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/Employee.java | {
"start": 301,
"end": 1030
} | class ____ extends AbstractEntity {
@Column(name = "user_id")
private String userId;
@Column(name = "first_name")
private String firstName;
@Column(name = "last_name")
private String lastName;
@Column(unique = true)
private String email;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "team_id", nullable = false)
private Team belongsToTeam;
public String getUserId() {
return userId;
}
public String getFirstName() {
return firstName;
}
public String getLastName() {
return lastName;
}
public Team getBelongsToTeam() {
return belongsToTeam;
}
@Entity
@Table(name = "team")
public static | Employee |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java | {
"start": 14012,
"end": 15682
} | class ____ {
private RoleMapperExpression rules;
private List<String> roles;
private List<TemplateRoleName> roleTemplates;
private Map<String, Object> metadata = Collections.emptyMap();
private Boolean enabled;
Builder rules(RoleMapperExpression expression) {
this.rules = expression;
return this;
}
Builder roles(List<String> roles) {
this.roles = new ArrayList<>(roles);
return this;
}
Builder roleTemplates(List<TemplateRoleName> templates) {
this.roleTemplates = new ArrayList<>(templates);
return this;
}
Builder metadata(Map<String, Object> metadata) {
this.metadata = metadata;
return this;
}
Builder enabled(boolean enabled) {
this.enabled = enabled;
return this;
}
private ExpressionRoleMapping build(String name) {
if (roles == null && roleTemplates == null) {
throw missingField(name, Fields.ROLES);
}
if (rules == null) {
throw missingField(name, Fields.RULES);
}
if (enabled == null) {
throw missingField(name, Fields.ENABLED);
}
return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled);
}
private static IllegalStateException missingField(String id, ParseField field) {
return new IllegalStateException("failed to parse role-mapping [" + id + "]. missing field [" + field + "]");
}
}
public | Builder |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/tvf/state/WindowMapState.java | {
"start": 1189,
"end": 5426
} | class ____<W, UV> implements WindowState<W> {
private final InternalMapState<RowData, W, RowData, UV> windowState;
public WindowMapState(InternalMapState<RowData, W, RowData, UV> windowState) {
this.windowState = windowState;
}
public void clear(W window) {
windowState.setCurrentNamespace(window);
windowState.clear();
}
/**
* Returns the current value associated with the given key.
*
* @param key The key of the mapping
* @return The value of the mapping with the given key
* @throws Exception Thrown if the system cannot access the state.
*/
public UV get(W window, RowData key) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.get(key);
}
/**
* Associates a new value with the given key.
*
* @param key The key of the mapping
* @param value The new value of the mapping
* @throws Exception Thrown if the system cannot access the state.
*/
public void put(W window, RowData key, UV value) throws Exception {
windowState.setCurrentNamespace(window);
windowState.put(key, value);
}
/**
* Copies all of the mappings from the given map into the state.
*
* @param map The mappings to be stored in this state
* @throws Exception Thrown if the system cannot access the state.
*/
public void putAll(W window, Map<RowData, UV> map) throws Exception {
windowState.setCurrentNamespace(window);
windowState.putAll(map);
}
/**
* Deletes the mapping of the given key.
*
* @param key The key of the mapping
* @throws Exception Thrown if the system cannot access the state.
*/
public void remove(W window, RowData key) throws Exception {
windowState.setCurrentNamespace(window);
windowState.remove(key);
}
/**
* Returns whether there exists the given mapping.
*
* @param key The key of the mapping
* @return True if there exists a mapping whose key equals to the given key
* @throws Exception Thrown if the system cannot access the state.
*/
public boolean contains(W window, RowData key) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.contains(key);
}
/**
* Returns all the mappings in the state.
*
* @return An iterable view of all the key-value pairs in the state.
* @throws Exception Thrown if the system cannot access the state.
*/
public Iterable<Map.Entry<RowData, UV>> entries(W window) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.entries();
}
/**
* Returns all the keys in the state.
*
* @return An iterable view of all the keys in the state.
* @throws Exception Thrown if the system cannot access the state.
*/
public Iterable<RowData> keys(W window) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.keys();
}
/**
* Returns all the values in the state.
*
* @return An iterable view of all the values in the state.
* @throws Exception Thrown if the system cannot access the state.
*/
public Iterable<UV> values(W window) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.values();
}
/**
* Iterates over all the mappings in the state.
*
* @return An iterator over all the mappings in the state
* @throws Exception Thrown if the system cannot access the state.
*/
public Iterator<Map.Entry<RowData, UV>> iterator(W window) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.iterator();
}
/**
* Returns true if this state contains no key-value mappings, otherwise false.
*
* @return True if this state contains no key-value mappings, otherwise false.
* @throws Exception Thrown if the system cannot access the state.
*/
public boolean isEmpty(W window) throws Exception {
windowState.setCurrentNamespace(window);
return windowState.isEmpty();
}
}
| WindowMapState |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/concurrent/ComponentMainThreadExecutorServiceAdapter.java | {
"start": 1393,
"end": 1652
} | class ____ a {@link ScheduledExecutorService} or {@link ScheduledExecutor} which shall be
* used as a {@link ComponentMainThreadExecutor}. It enhances the given executor with an assert that
* the current thread is the main thread of the executor.
*/
public | for |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Partitioner.java | {
"start": 1627,
"end": 1821
} | class ____ obtain the Job's
* configuration object, implement the {@link Configurable} interface.</p>
*
* @see Reducer
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract | to |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java | {
"start": 1141,
"end": 6415
} | class ____ extends ElasticsearchInternalServiceSettings {
public static final String NAME = "elastic_reranker_service_settings";
public static final String LONG_DOCUMENT_STRATEGY = "long_document_strategy";
public static final String MAX_CHUNKS_PER_DOC = "max_chunks_per_doc";
private static final TransportVersion ELASTIC_RERANKER_CHUNKING_CONFIGURATION = TransportVersion.fromName(
"elastic_reranker_chunking_configuration"
);
private final LongDocumentStrategy longDocumentStrategy;
private final Integer maxChunksPerDoc;
public static ElasticRerankerServiceSettings defaultEndpointSettings() {
return new ElasticRerankerServiceSettings(null, 1, RERANKER_ID, new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32));
}
public ElasticRerankerServiceSettings(
ElasticsearchInternalServiceSettings other,
LongDocumentStrategy longDocumentStrategy,
Integer maxChunksPerDoc
) {
super(other);
this.longDocumentStrategy = longDocumentStrategy;
this.maxChunksPerDoc = maxChunksPerDoc;
}
private ElasticRerankerServiceSettings(
Integer numAllocations,
int numThreads,
String modelId,
AdaptiveAllocationsSettings adaptiveAllocationsSettings
) {
super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null);
this.longDocumentStrategy = null;
this.maxChunksPerDoc = null;
}
protected ElasticRerankerServiceSettings(
Integer numAllocations,
int numThreads,
String modelId,
AdaptiveAllocationsSettings adaptiveAllocationsSettings,
LongDocumentStrategy longDocumentStrategy,
Integer maxChunksPerDoc
) {
super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings, null);
this.longDocumentStrategy = longDocumentStrategy;
this.maxChunksPerDoc = maxChunksPerDoc;
}
public ElasticRerankerServiceSettings(StreamInput in) throws IOException {
super(in);
if (in.getTransportVersion().supports(ELASTIC_RERANKER_CHUNKING_CONFIGURATION)) {
this.longDocumentStrategy = in.readOptionalEnum(LongDocumentStrategy.class);
this.maxChunksPerDoc = in.readOptionalInt();
} else {
this.longDocumentStrategy = null;
this.maxChunksPerDoc = null;
}
}
/**
* Parse the ElasticRerankerServiceSettings from map and validate the setting values.
*
* If required setting are missing or the values are invalid an
* {@link ValidationException} is thrown.
*
* @param map Source map containing the config
* @return Parsed and validated service settings
*/
public static ElasticRerankerServiceSettings fromMap(Map<String, Object> map) {
ValidationException validationException = new ValidationException();
var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException);
LongDocumentStrategy longDocumentStrategy = extractOptionalEnum(
map,
LONG_DOCUMENT_STRATEGY,
ModelConfigurations.SERVICE_SETTINGS,
LongDocumentStrategy::fromString,
EnumSet.allOf(LongDocumentStrategy.class),
validationException
);
Integer maxChunksPerDoc = extractOptionalPositiveInteger(
map,
MAX_CHUNKS_PER_DOC,
ModelConfigurations.SERVICE_SETTINGS,
validationException
);
if (maxChunksPerDoc != null && (longDocumentStrategy == null || longDocumentStrategy == LongDocumentStrategy.TRUNCATE)) {
validationException.addValidationError(
"The [" + MAX_CHUNKS_PER_DOC + "] setting requires [" + LONG_DOCUMENT_STRATEGY + "] to be set to [chunk]"
);
}
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
return new ElasticRerankerServiceSettings(baseSettings.build(), longDocumentStrategy, maxChunksPerDoc);
}
public LongDocumentStrategy getLongDocumentStrategy() {
return longDocumentStrategy;
}
public Integer getMaxChunksPerDoc() {
return maxChunksPerDoc;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getTransportVersion().supports(ELASTIC_RERANKER_CHUNKING_CONFIGURATION)) {
out.writeOptionalEnum(longDocumentStrategy);
out.writeOptionalInt(maxChunksPerDoc);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
addInternalSettingsToXContent(builder, params);
if (longDocumentStrategy != null) {
builder.field(LONG_DOCUMENT_STRATEGY, longDocumentStrategy.strategyName);
}
if (maxChunksPerDoc != null) {
builder.field(MAX_CHUNKS_PER_DOC, maxChunksPerDoc);
}
builder.endObject();
return builder;
}
@Override
public String getWriteableName() {
return ElasticRerankerServiceSettings.NAME;
}
public | ElasticRerankerServiceSettings |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-jsonb/runtime/src/main/java/io/quarkus/resteasy/jsonb/vertx/VertxJson.java | {
"start": 4902,
"end": 5247
} | class ____ implements JsonbDeserializer<JsonArray> {
@Override
public JsonArray deserialize(JsonParser parser, DeserializationContext context, Type type) {
JsonArray object = new JsonArray();
copy(object, parser.getArray());
return object;
}
}
public static | JsonArrayDeserializer |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/EntityMapTest.java | {
"start": 2498,
"end": 2582
} | class ____ extends AbstractEntity {
}
@Entity(name = "C")
@Audited
public static | B |
java | spring-projects__spring-boot | module/spring-boot-cache/src/test/java/org/springframework/boot/cache/actuate/endpoint/CachesEndpointWebIntegrationTests.java | {
"start": 1415,
"end": 3432
} | class ____ {
@WebEndpointTest
void allCaches(WebTestClient client) {
client.get()
.uri("/actuator/caches")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("cacheManagers.one.caches.a.target")
.isEqualTo(ConcurrentHashMap.class.getName())
.jsonPath("cacheManagers.one.caches.b.target")
.isEqualTo(ConcurrentHashMap.class.getName())
.jsonPath("cacheManagers.two.caches.a.target")
.isEqualTo(ConcurrentHashMap.class.getName())
.jsonPath("cacheManagers.two.caches.c.target")
.isEqualTo(ConcurrentHashMap.class.getName());
}
@WebEndpointTest
void namedCache(WebTestClient client) {
client.get()
.uri("/actuator/caches/b")
.exchange()
.expectStatus()
.isOk()
.expectBody()
.jsonPath("name")
.isEqualTo("b")
.jsonPath("cacheManager")
.isEqualTo("one")
.jsonPath("target")
.isEqualTo(ConcurrentHashMap.class.getName());
}
@WebEndpointTest
void namedCacheWithUnknownName(WebTestClient client) {
client.get().uri("/actuator/caches/does-not-exist").exchange().expectStatus().isNotFound();
}
@WebEndpointTest
void namedCacheWithNonUniqueName(WebTestClient client) {
client.get().uri("/actuator/caches/a").exchange().expectStatus().isBadRequest();
}
@WebEndpointTest
void clearNamedCache(WebTestClient client, ApplicationContext context) {
Cache b = context.getBean("one", CacheManager.class).getCache("b");
assertThat(b).isNotNull();
b.put("test", "value");
client.delete().uri("/actuator/caches/b").exchange().expectStatus().isNoContent();
assertThat(b.get("test")).isNull();
}
@WebEndpointTest
void cleanNamedCacheWithUnknownName(WebTestClient client) {
client.delete().uri("/actuator/caches/does-not-exist").exchange().expectStatus().isNotFound();
}
@WebEndpointTest
void clearNamedCacheWithNonUniqueName(WebTestClient client) {
client.get().uri("/actuator/caches/a").exchange().expectStatus().isBadRequest();
}
@Configuration(proxyBeanMethods = false)
static | CachesEndpointWebIntegrationTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/block/OracleBlockTest19.java | {
"start": 946,
"end": 2915
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "BEGIN\n" +
" EXECUTE IMMEDIATE 'DROP SEQUENCE properties_seq';\n" +
"EXCEPTION\n" +
" WHEN OTHERS THEN\n" +
" IF SQLCODE != -2289 THEN\n" +
" RAISE;\n" +
" END IF;\n" +
"END;";
List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, JdbcConstants.ORACLE);
assertEquals(1, stmtList.size());
String result = SQLUtils.toOracleString(stmtList.get(0));
System.out.println(result);
assertEquals("BEGIN\n" +
"\tEXECUTE IMMEDIATE 'DROP SEQUENCE properties_seq';\n" +
"EXCEPTION\n" +
"\tWHEN OTHERS THEN\n" +
"\t\tIF SQLCODE != -2289 THEN\n" +
"\t\t\tRAISE;\n" +
"\t\tEND IF;\n" +
"END;", result);
assertEquals(1, stmtList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
for (SQLStatement statement : stmtList) {
statement.accept(visitor);
}
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(0, visitor.getTables().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("employees")));
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertEquals(0, visitor.getRelationships().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("employees", "salary")));
}
}
| OracleBlockTest19 |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/MapKeyTypeAnnotation.java | {
"start": 610,
"end": 2203
} | class ____ implements MapKeyType {
private java.lang.Class<? extends org.hibernate.usertype.UserType<?>> value;
private org.hibernate.annotations.Parameter[] parameters;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public MapKeyTypeAnnotation(ModelsContext modelContext) {
this.parameters = new org.hibernate.annotations.Parameter[0];
}
/**
* Used in creating annotation instances from JDK variant
*/
public MapKeyTypeAnnotation(MapKeyType annotation, ModelsContext modelContext) {
this.value = annotation.value();
this.parameters = extractJdkValue( annotation, HibernateAnnotations.MAP_KEY_TYPE, "parameters", modelContext );
}
/**
* Used in creating annotation instances from Jandex variant
*/
public MapKeyTypeAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (Class<? extends org.hibernate.usertype.UserType<?>>) attributeValues.get( "value" );
this.parameters = (org.hibernate.annotations.Parameter[]) attributeValues.get( "parameters" );
}
@Override
public Class<? extends Annotation> annotationType() {
return MapKeyType.class;
}
@Override
public java.lang.Class<? extends org.hibernate.usertype.UserType<?>> value() {
return value;
}
public void value(java.lang.Class<? extends org.hibernate.usertype.UserType<?>> value) {
this.value = value;
}
@Override
public org.hibernate.annotations.Parameter[] parameters() {
return parameters;
}
public void parameters(org.hibernate.annotations.Parameter[] value) {
this.parameters = value;
}
}
| MapKeyTypeAnnotation |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/artifact/resolver/DefaultArtifactResolver.java | {
"start": 23376,
"end": 25578
} | class ____ implements Runnable {
private final ClassLoader classLoader;
private final CountDownLatch latch;
private final Artifact artifact;
private final RepositorySystemSession session;
private final List<ArtifactRepository> remoteRepositories;
private final ArtifactResolutionResult result;
ResolveTask(
ClassLoader classLoader,
CountDownLatch latch,
Artifact artifact,
RepositorySystemSession session,
List<ArtifactRepository> remoteRepositories,
ArtifactResolutionResult result) {
this.classLoader = classLoader;
this.latch = latch;
this.artifact = artifact;
this.session = session;
this.remoteRepositories = remoteRepositories;
this.result = result;
}
@Override
public void run() {
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(classLoader);
resolve(artifact, remoteRepositories, session);
} catch (ArtifactNotFoundException anfe) {
// These are cases where the artifact just isn't present in any of the remote repositories
// because it wasn't deployed, or it was deployed in the wrong place.
synchronized (result) {
result.addMissingArtifact(artifact);
}
} catch (ArtifactResolutionException e) {
// This is really a wagon TransferFailedException so something went wrong after we successfully
// retrieved the metadata.
synchronized (result) {
result.addErrorArtifactException(e);
}
} finally {
latch.countDown();
Thread.currentThread().setContextClassLoader(old);
}
}
}
@Override
public void dispose() {
if (executor instanceof ExecutorService executorService) {
executorService.shutdownNow();
}
}
}
| ResolveTask |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/AutoSecurityRolesAllowedWithScopesTestCase.java | {
"start": 235,
"end": 1201
} | class ____ extends AutoSecurityRolesAllowedWithScopesTestBase {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(ResourceBean.class, OpenApiResourceSecuredAtClassLevel.class,
OpenApiResourceSecuredAtMethodLevel.class, OpenApiResourceSecuredAtMethodLevel2.class)
.addAsResource(
new StringAsset("quarkus.smallrye-openapi.security-scheme=oauth2-implicit\n"
+ "quarkus.smallrye-openapi.security-scheme-name=MyScheme\n"
+ "quarkus.smallrye-openapi.security-scheme-description=Authentication using MyScheme"),
"application.properties"));
@Test
void testAutoSecurityRequirement() {
testAutoSecurityRequirement("oauth2");
}
}
| AutoSecurityRolesAllowedWithScopesTestCase |
java | google__guice | core/src/com/google/inject/multibindings/MapKey.java | {
"start": 2024,
"end": 2381
} | interface ____ {
/**
* if {@code unwrapValue} is false, then the whole annotation will be the type and annotation
* instances will be the keys. If {@code unwrapValue} is true, the value() type of key type
* annotation will be the key type for injected map and the value instances will be the keys.
*/
boolean unwrapValue() default true;
}
| MapKey |
java | apache__kafka | raft/src/test/java/org/apache/kafka/raft/internals/BlockingMessageQueueTest.java | {
"start": 1188,
"end": 2143
} | class ____ {
@Test
public void testOfferAndPoll() {
BlockingMessageQueue queue = new BlockingMessageQueue();
assertTrue(queue.isEmpty());
assertNull(queue.poll(0));
RaftMessage message1 = Mockito.mock(RaftMessage.class);
queue.add(message1);
assertFalse(queue.isEmpty());
assertEquals(message1, queue.poll(0));
assertTrue(queue.isEmpty());
RaftMessage message2 = Mockito.mock(RaftMessage.class);
RaftMessage message3 = Mockito.mock(RaftMessage.class);
queue.add(message2);
queue.add(message3);
assertFalse(queue.isEmpty());
assertEquals(message2, queue.poll(0));
assertEquals(message3, queue.poll(0));
}
@Test
public void testWakeupFromPoll() {
BlockingMessageQueue queue = new BlockingMessageQueue();
queue.wakeup();
assertNull(queue.poll(Long.MAX_VALUE));
}
} | BlockingMessageQueueTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/StorageTypeParam.java | {
"start": 896,
"end": 1341
} | class ____ extends StringParam {
/** Parameter name. */
public static final String NAME = "storagetype";
/** Default parameter value. */
public static final String DEFAULT = "";
private static final Domain DOMAIN = new Domain(NAME, null);
public StorageTypeParam(final String str) {
super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
}
@Override
public String getName() {
return NAME;
}
}
| StorageTypeParam |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/hql/spi/SqmQueryImplementor.java | {
"start": 1250,
"end": 7690
} | interface ____<R> extends QueryImplementor<R>, SqmQuery<R>, NameableQuery {
@Override
NamedSqmQueryMemento<R> toMemento(String name);
@Override
ParameterMetadataImplementor getParameterMetadata();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// covariance
@Override
SqmQueryImplementor<R> setCacheMode(CacheMode cacheMode);
@Override
SqmQueryImplementor<R> setCacheable(boolean cacheable);
@Override
SqmQueryImplementor<R> setCacheRegion(String cacheRegion);
@Override
SqmQueryImplementor<R> setTimeout(int timeout);
@Override
SqmQueryImplementor<R> setFetchSize(int fetchSize);
@Override
SqmQueryImplementor<R> setReadOnly(boolean readOnly);
@Override
SqmQueryImplementor<R> applyGraph(@SuppressWarnings("rawtypes") RootGraph graph, GraphSemantic semantic);
@Override @Deprecated
default SqmQueryImplementor<R> applyFetchGraph(@SuppressWarnings("rawtypes") RootGraph graph) {
QueryImplementor.super.applyFetchGraph( graph );
return this;
}
@Override @Deprecated
default SqmQueryImplementor<R> applyLoadGraph(@SuppressWarnings("rawtypes") RootGraph graph) {
QueryImplementor.super.applyLoadGraph( graph );
return this;
}
@Override
SqmQueryImplementor<R> setComment(String comment);
@Override
SqmQueryImplementor<R> addQueryHint(String hint);
@Override @Deprecated
SqmQueryImplementor<R> setLockOptions(LockOptions lockOptions);
@Override
<T> SqmQueryImplementor<T> setTupleTransformer(TupleTransformer<T> transformer);
@Override
SqmQueryImplementor<R> setResultListTransformer(ResultListTransformer<R> transformer);
@Override @Deprecated(since = "5.2")
default <T> SqmQueryImplementor<T> setResultTransformer(ResultTransformer<T> transformer) {
return setTupleTransformer( transformer ).setResultListTransformer( transformer );
}
@Override @Deprecated(since = "7")
SqmQueryImplementor<R> setHibernateFlushMode(FlushMode flushMode);
@Override
SqmQueryImplementor<R> setQueryFlushMode(QueryFlushMode queryFlushMode);
@Override
SqmQueryImplementor<R> setMaxResults(int maxResults);
@Override
SqmQueryImplementor<R> setFirstResult(int startPosition);
@Override
SqmQueryImplementor<R> setHint(String hintName, Object value);
@Override @Deprecated(since = "7")
SqmQueryImplementor<R> setFlushMode(FlushModeType flushMode);
@Override
SqmQueryImplementor<R> setLockMode(LockModeType lockMode);
@Override
SqmQueryImplementor<R> setParameter(String name, Object value);
@Override
<P> SqmQueryImplementor<R> setParameter(String name, P value, Class<P> type);
@Override
<P> SqmQueryImplementor<R> setParameter(String name, P value, Type<P> type);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(String name, Instant value, TemporalType temporalType);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(String name, Calendar value, TemporalType temporalType);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(String name, Date value, TemporalType temporalType);
@Override
SqmQueryImplementor<R> setParameter(int position, Object value);
@Override
<P> SqmQueryImplementor<R> setParameter(int position, P value, Class<P> type);
@Override
<P> SqmQueryImplementor<R> setParameter(int position, P value, Type<P> type);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(int position, Instant value, TemporalType temporalType);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(int position, Date value, TemporalType temporalType);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(int position, Calendar value, TemporalType temporalType);
@Override
<T> SqmQueryImplementor<R> setParameter(QueryParameter<T> parameter, T value);
@Override
<P> SqmQueryImplementor<R> setParameter(QueryParameter<P> parameter, P value, Class<P> type);
@Override
<P> SqmQueryImplementor<R> setParameter(QueryParameter<P> parameter, P val, Type<P> type);
@Override
<T> SqmQueryImplementor<R> setParameter(Parameter<T> param, T value);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(Parameter<Calendar> param, Calendar value, TemporalType temporalType);
@Override @Deprecated
SqmQueryImplementor<R> setParameter(Parameter<Date> param, Date value, TemporalType temporalType);
@Override
SqmQueryImplementor<R> setParameterList(String name, @SuppressWarnings("rawtypes") Collection values);
@Override
<P> SqmQueryImplementor<R> setParameterList(String name, Collection<? extends P> values, Class<P> javaType);
@Override
<P> SqmQueryImplementor<R> setParameterList(String name, Collection<? extends P> values, Type<P> type);
@Override
SqmQueryImplementor<R> setParameterList(String name, Object[] values);
@Override
<P> SqmQueryImplementor<R> setParameterList(String name, P[] values, Class<P> javaType);
@Override
<P> SqmQueryImplementor<R> setParameterList(String name, P[] values, Type<P> type);
@Override
SqmQueryImplementor<R> setParameterList(int position, @SuppressWarnings("rawtypes") Collection values);
@Override
<P> SqmQueryImplementor<R> setParameterList(int position, Collection<? extends P> values, Class<P> javaType);
@Override
<P> SqmQueryImplementor<R> setParameterList(int position, Collection<? extends P> values, Type<P> type);
@Override
SqmQueryImplementor<R> setParameterList(int position, Object[] values);
@Override
<P> SqmQueryImplementor<R> setParameterList(int position, P[] values, Class<P> javaType);
@Override
<P> SqmQueryImplementor<R> setParameterList(int position, P[] values, Type<P> type);
@Override
<P> SqmQueryImplementor<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values);
@Override
<P> SqmQueryImplementor<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Class<P> javaType);
@Override
<P> SqmQueryImplementor<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Type<P> type);
@Override
<P> SqmQueryImplementor<R> setParameterList(QueryParameter<P> parameter, P[] values);
@Override
<P> SqmQueryImplementor<R> setParameterList(QueryParameter<P> parameter, P[] values, Class<P> javaType);
@Override
<P> SqmQueryImplementor<R> setParameterList(QueryParameter<P> parameter, P[] values, Type<P> type);
@Override
SqmQueryImplementor<R> setProperties(Object bean);
@Override
SqmQueryImplementor<R> setProperties(@SuppressWarnings("rawtypes") Map bean);
}
| SqmQueryImplementor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cascade/Mouth.java | {
"start": 518,
"end": 718
} | class ____ {
@Id
@GeneratedValue
public Integer id;
@Column(name="mouth_size")
public int size;
@OneToMany(mappedBy = "mouth", cascade = { REMOVE, DETACH } )
public Collection<Tooth> teeth;
}
| Mouth |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/IteratingActionListenerTests.java | {
"start": 1079,
"end": 8451
} | class ____ extends ESTestCase {
public void testIteration() {
final int numberOfItems = scaledRandomIntBetween(1, 32);
final int numberOfIterations = scaledRandomIntBetween(1, numberOfItems);
List<Object> items = new ArrayList<>(numberOfItems);
for (int i = 0; i < numberOfItems; i++) {
items.add(new Object());
}
final AtomicInteger iterations = new AtomicInteger(0);
final BiConsumer<Object, ActionListener<Object>> consumer = (listValue, listener) -> {
final int current = iterations.incrementAndGet();
if (current == numberOfIterations) {
listener.onResponse(items.get(current - 1));
} else {
listener.onResponse(null);
}
};
IteratingActionListener<Object, Object> iteratingListener = new IteratingActionListener<>(
ActionTestUtils.assertNoFailureListener(object -> {
assertNotNull(object);
assertThat(object, sameInstance(items.get(numberOfIterations - 1)));
}),
consumer,
items,
new ThreadContext(Settings.EMPTY)
);
iteratingListener.run();
// we never really went async, its all chained together so verify this for sanity
assertEquals(numberOfIterations, iterations.get());
}
public void testIterationDoesntAllowThreadContextLeak() {
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
final int numberOfItems = scaledRandomIntBetween(1, 32);
final int numberOfIterations = scaledRandomIntBetween(1, numberOfItems);
List<Object> items = new ArrayList<>(numberOfItems);
for (int i = 0; i < numberOfItems; i++) {
items.add(new Object());
}
threadContext.putHeader("outside", "listener");
final AtomicInteger iterations = new AtomicInteger(0);
final BiConsumer<Object, ActionListener<Object>> consumer = (listValue, listener) -> {
final int current = iterations.incrementAndGet();
assertEquals("listener", threadContext.getHeader("outside"));
if (current == numberOfIterations) {
threadContext.putHeader("foo", "bar");
listener.onResponse(items.get(current - 1));
} else {
listener.onResponse(null);
}
};
IteratingActionListener<Object, Object> iteratingListener = new IteratingActionListener<>(
ActionTestUtils.assertNoFailureListener(object -> {
assertNotNull(object);
assertThat(object, sameInstance(items.get(numberOfIterations - 1)));
assertEquals("bar", threadContext.getHeader("foo"));
assertEquals("listener", threadContext.getHeader("outside"));
}),
consumer,
items,
threadContext
);
iteratingListener.run();
// we never really went async, its all chained together so verify this for sanity
assertEquals(numberOfIterations, iterations.get());
assertNull(threadContext.getHeader("foo"));
assertEquals("listener", threadContext.getHeader("outside"));
}
public void testIterationEmptyList() {
IteratingActionListener<Object, Object> listener = new IteratingActionListener<>(
ActionTestUtils.assertNoFailureListener(Assert::assertNull),
(listValue, iteratingListener) -> fail("consumer should not have been called!!!"),
Collections.emptyList(),
new ThreadContext(Settings.EMPTY)
);
listener.run();
}
public void testFailure() {
final int numberOfItems = scaledRandomIntBetween(1, 32);
final int numberOfIterations = scaledRandomIntBetween(1, numberOfItems);
List<Object> items = new ArrayList<>(numberOfItems);
for (int i = 0; i < numberOfItems; i++) {
items.add(new Object());
}
final AtomicInteger iterations = new AtomicInteger(0);
final BiConsumer<Object, ActionListener<Object>> consumer = (listValue, listener) -> {
final int current = iterations.incrementAndGet();
if (current == numberOfIterations) {
listener.onFailure(new ElasticsearchException("expected exception"));
} else {
listener.onResponse(null);
}
};
final AtomicBoolean onFailureCalled = new AtomicBoolean(false);
IteratingActionListener<Object, Object> iteratingListener = new IteratingActionListener<>(ActionListener.wrap((object) -> {
fail("onResponse should not have been called, but was called with: " + object);
}, (e) -> {
assertEquals("expected exception", e.getMessage());
assertTrue(onFailureCalled.compareAndSet(false, true));
}), consumer, items, new ThreadContext(Settings.EMPTY));
iteratingListener.run();
// we never really went async, its all chained together so verify this for sanity
assertEquals(numberOfIterations, iterations.get());
assertTrue(onFailureCalled.get());
}
public void testFunctionApplied() {
final int numberOfItems = scaledRandomIntBetween(2, 32);
final int numberOfIterations = scaledRandomIntBetween(1, numberOfItems);
List<Object> items = new ArrayList<>(numberOfItems);
for (int i = 0; i < numberOfItems; i++) {
items.add(new Object());
}
final AtomicInteger iterations = new AtomicInteger(0);
final Predicate<Object> iterationPredicate = object -> {
final int current = iterations.incrementAndGet();
return current != numberOfIterations;
};
final BiConsumer<Object, ActionListener<Object>> consumer = (listValue, listener) -> {
listener.onResponse(items.get(iterations.get()));
};
final AtomicReference<Object> originalObject = new AtomicReference<>();
final AtomicReference<Object> result = new AtomicReference<>();
final Function<Object, Object> responseFunction = object -> {
originalObject.set(object);
Object randomResult;
do {
randomResult = randomFrom(items);
} while (randomResult == object);
result.set(randomResult);
return randomResult;
};
IteratingActionListener<Object, Object> iteratingListener = new IteratingActionListener<>(
ActionTestUtils.assertNoFailureListener(object -> {
assertNotNull(object);
assertNotNull(originalObject.get());
assertThat(object, sameInstance(result.get()));
assertThat(object, not(sameInstance(originalObject.get())));
assertThat(originalObject.get(), sameInstance(items.get(iterations.get() - 1)));
}),
consumer,
items,
new ThreadContext(Settings.EMPTY),
responseFunction,
iterationPredicate
);
iteratingListener.run();
// we never really went async, its all chained together so verify this for sanity
assertEquals(numberOfIterations, iterations.get());
}
}
| IteratingActionListenerTests |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/physical/stream/TwoStageOptimizedAggregateRule.java | {
"start": 3579,
"end": 9295
} | class ____
extends RelRule<TwoStageOptimizedAggregateRule.TwoStageOptimizedAggregateRuleConfig> {
public static final TwoStageOptimizedAggregateRule INSTANCE =
TwoStageOptimizedAggregateRule.TwoStageOptimizedAggregateRuleConfig.DEFAULT.toRule();
private TwoStageOptimizedAggregateRule(TwoStageOptimizedAggregateRuleConfig config) {
super(config);
}
@Override
public boolean matches(RelOptRuleCall call) {
TableConfig tableConfig = unwrapTableConfig(call);
boolean isMiniBatchEnabled =
tableConfig.get(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED);
boolean isTwoPhaseEnabled =
getAggPhaseStrategy(tableConfig) != AggregatePhaseStrategy.ONE_PHASE;
return isMiniBatchEnabled && isTwoPhaseEnabled && matchesTwoStage(call.rel(0), call.rel(2));
}
public static boolean matchesTwoStage(StreamPhysicalGroupAggregate agg, RelNode realInput) {
boolean needRetraction = !ChangelogPlanUtils.isInsertOnly((StreamPhysicalRel) realInput);
FlinkRelMetadataQuery fmq =
FlinkRelMetadataQuery.reuseOrCreate(agg.getCluster().getMetadataQuery());
RelModifiedMonotonicity monotonicity = fmq.getRelModifiedMonotonicity(agg);
boolean[] needRetractionArray =
AggregateUtil.deriveAggCallNeedRetractions(
agg.grouping().length, agg.aggCalls(), needRetraction, monotonicity);
AggregateInfoList aggInfoList =
AggregateUtil.transformToStreamAggregateInfoList(
unwrapTypeFactory(agg),
FlinkTypeFactory.toLogicalRowType(agg.getInput().getRowType()),
agg.aggCalls(),
needRetractionArray,
needRetraction,
true,
true);
return AggregateUtil.doAllSupportPartialMerge(aggInfoList.aggInfos())
&& !isInputSatisfyRequiredDistribution(realInput, agg.grouping());
}
private static boolean isInputSatisfyRequiredDistribution(RelNode input, int[] keys) {
RelDistribution requiredDistribution = createDistribution(keys);
RelTraitSet traitSet = input.getTraitSet();
RelDistribution inputDistribution =
traitSet.getTrait(FlinkRelDistributionTraitDef.INSTANCE());
return inputDistribution.satisfies(requiredDistribution);
}
private static FlinkRelDistribution createDistribution(int[] keys) {
if (keys.length > 0) {
List<Integer> fields = IntStream.of(keys).boxed().collect(Collectors.toList());
return FlinkRelDistribution.hash(fields, true);
} else {
return FlinkRelDistribution.SINGLETON();
}
}
@Override
public void onMatch(RelOptRuleCall call) {
StreamPhysicalGroupAggregate originalAgg = call.rel(0);
RelNode realInput = call.rel(2);
boolean needRetraction = !ChangelogPlanUtils.isInsertOnly((StreamPhysicalRel) realInput);
FlinkRelMetadataQuery fmq = FlinkRelMetadataQuery.reuseOrCreate(call.getMetadataQuery());
RelModifiedMonotonicity monotonicity = fmq.getRelModifiedMonotonicity(originalAgg);
boolean[] aggCallNeedRetractions =
AggregateUtil.deriveAggCallNeedRetractions(
originalAgg.grouping().length,
originalAgg.aggCalls(),
needRetraction,
monotonicity);
// Set the traits for the local aggregation
RelTraitSet localAggTraitSet =
realInput
.getTraitSet()
.plus(ModifyKindSetTrait.INSERT_ONLY())
.plus(UpdateKindTrait.NONE());
StreamPhysicalLocalGroupAggregate localHashAgg =
new StreamPhysicalLocalGroupAggregate(
originalAgg.getCluster(),
localAggTraitSet,
realInput,
originalAgg.grouping(),
originalAgg.aggCalls(),
aggCallNeedRetractions,
needRetraction,
originalAgg.partialFinalType());
// Global grouping keys are forwarded by local agg, use identity keys
int[] globalGrouping =
java.util.stream.IntStream.range(0, originalAgg.grouping().length).toArray();
FlinkRelDistribution globalDistribution = createDistribution(globalGrouping);
// create exchange if needed
RelNode newInput =
FlinkExpandConversionRule.satisfyDistribution(
FlinkConventions.STREAM_PHYSICAL(), localHashAgg, globalDistribution);
RelTraitSet globalAggProvidedTraitSet = originalAgg.getTraitSet();
StreamPhysicalGlobalGroupAggregate globalAgg =
new StreamPhysicalGlobalGroupAggregate(
originalAgg.getCluster(),
globalAggProvidedTraitSet,
newInput,
originalAgg.getRowType(),
globalGrouping,
originalAgg.aggCalls(),
aggCallNeedRetractions,
realInput.getRowType(),
needRetraction,
originalAgg.partialFinalType(),
Option.empty(),
originalAgg.hints());
call.transformTo(globalAgg);
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public | TwoStageOptimizedAggregateRule |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMStoreCommands.java | {
"start": 1811,
"end": 6463
} | class ____ {
@Test
public void testFormatStateStoreCmdForZK() throws Exception {
StateChangeRequestInfo req = new StateChangeRequestInfo(
HAServiceProtocol.RequestSource.REQUEST_BY_USER);
try (TestingServer curatorTestingServer =
TestZKRMStateStore.setupCuratorServer();
CuratorFramework curatorFramework = TestZKRMStateStore.
setupCuratorFramework(curatorTestingServer)) {
Configuration conf = TestZKRMStateStore.createHARMConf("rm1,rm2", "rm1",
1234, false, curatorTestingServer);
ResourceManager rm = new MockRM(conf);
rm.start();
rm.getRMContext().getRMAdminService().transitionToActive(req);
String zkStateRoot = ZKRMStateStore.ROOT_ZNODE_NAME;
assertEquals(zkStateRoot, curatorFramework.getChildren().forPath(
YarnConfiguration.DEFAULT_ZK_RM_STATE_STORE_PARENT_PATH).get(0),
"RM State store parent path should have a child node " +
zkStateRoot);
rm.close();
try {
ResourceManager.deleteRMStateStore(conf);
} catch (Exception e) {
fail("Exception should not be thrown during format rm state store" +
" operation.");
}
assertTrue(curatorFramework.getChildren().forPath(
YarnConfiguration.DEFAULT_ZK_RM_STATE_STORE_PARENT_PATH).isEmpty(),
"After store format parent path should have no child nodes");
}
}
@Test
public void testFormatConfStoreCmdForZK() throws Exception {
try (TestingServer curatorTestingServer =
TestZKRMStateStore.setupCuratorServer();
CuratorFramework curatorFramework = TestZKRMStateStore.
setupCuratorFramework(curatorTestingServer)) {
Configuration conf = TestZKRMStateStore.createHARMConf("rm1,rm2", "rm1",
1234, false, curatorTestingServer);
conf.set(YarnConfiguration.SCHEDULER_CONFIGURATION_STORE_CLASS,
YarnConfiguration.ZK_CONFIGURATION_STORE);
ResourceManager rm = new MockRM(conf);
rm.start();
String confStorePath = conf.get(
YarnConfiguration.RM_SCHEDCONF_STORE_ZK_PARENT_PATH,
YarnConfiguration.DEFAULT_RM_SCHEDCONF_STORE_ZK_PARENT_PATH)
+ "/CONF_STORE";
assertNotNull(curatorFramework.checkExists().forPath(confStorePath),
"Failed to initialize ZKConfigurationStore");
rm.close();
try {
ResourceManager.deleteRMConfStore(conf);
} catch (Exception e) {
fail("Exception should not be thrown during format rm conf store" +
" operation.");
}
assertNull(curatorFramework.checkExists().forPath(confStorePath),
"Failed to format ZKConfigurationStore");
}
}
@Test
public void testRemoveApplicationFromStateStoreCmdForZK() throws Exception {
StateChangeRequestInfo req = new StateChangeRequestInfo(
HAServiceProtocol.RequestSource.REQUEST_BY_USER);
try (TestingServer curatorTestingServer =
TestZKRMStateStore.setupCuratorServer();
CuratorFramework curatorFramework = TestZKRMStateStore.
setupCuratorFramework(curatorTestingServer)) {
Configuration conf = TestZKRMStateStore.createHARMConf("rm1,rm2", "rm1",
1234, false, curatorTestingServer);
ResourceManager rm = new MockRM(conf);
rm.start();
rm.getRMContext().getRMAdminService().transitionToActive(req);
rm.close();
String appId = ApplicationId.newInstance(
System.currentTimeMillis(), 1).toString();
String appRootPath = YarnConfiguration.
DEFAULT_ZK_RM_STATE_STORE_PARENT_PATH + "/"+
ZKRMStateStore.ROOT_ZNODE_NAME + "/" + RMStateStore.RM_APP_ROOT;
String appIdPath = appRootPath + "/" + appId;
curatorFramework.create().forPath(appIdPath);
for (String path : curatorFramework.getChildren().forPath(appRootPath)) {
if (path.equals(ZKRMStateStore.RM_APP_ROOT_HIERARCHIES)) {
continue;
}
assertEquals(appId, path,
"Application node for " + appId + " should exist");
}
try {
ResourceManager.removeApplication(conf, appId);
} catch (Exception e) {
fail("Exception should not be thrown while removing app from " +
"rm state store.");
}
assertTrue(
curatorFramework.getChildren().forPath(appRootPath).size() == 1 &&
curatorFramework.getChildren().forPath(appRootPath).get(0).equals(
ZKRMStateStore.RM_APP_ROOT_HIERARCHIES),
"After remove app from store there should be no child nodes" +
" for application in app root path");
}
}
} | TestRMStoreCommands |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/seqno/RetentionLeasesXContentTests.java | {
"start": 703,
"end": 1883
} | class ____ extends AbstractXContentTestCase<RetentionLeases> {
@Override
protected RetentionLeases createTestInstance() {
final long primaryTerm = randomNonNegativeLong();
final long version = randomNonNegativeLong();
final int length = randomIntBetween(0, 8);
final List<RetentionLease> leases = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
final String id = randomAlphaOfLength(8);
final long retainingSequenceNumber = randomNonNegativeLong();
final long timestamp = randomNonNegativeLong();
final String source = randomAlphaOfLength(8);
final RetentionLease retentionLease = new RetentionLease(id, retainingSequenceNumber, timestamp, source);
leases.add(retentionLease);
}
return new RetentionLeases(primaryTerm, version, leases);
}
@Override
protected RetentionLeases doParseInstance(final XContentParser parser) throws IOException {
return RetentionLeases.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
}
| RetentionLeasesXContentTests |
java | apache__maven | impl/maven-di/src/test/java/org/apache/maven/di/impl/InjectorImplTest.java | {
"start": 4056,
"end": 4510
} | class ____ {
@Inject
MyService service;
}
}
@Test
void typedTest() {
Injector injector =
Injector.create().bindImplicit(TypedTest.MyServiceImpl.class).bindImplicit(TypedTest.MyMojo.class);
TypedTest.MyMojo mojo = injector.getInstance(TypedTest.MyMojo.class);
assertNotNull(mojo);
assertNotNull(mojo.service);
}
@SuppressWarnings("unused")
static | MyMojo |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RLocalCachedMapCache.java | {
"start": 977,
"end": 1361
} | interface ____<K, V> extends RMapCache<K, V>, RLocalCachedMap<K, V> {
/**
* Pre-warm the cached values. Not guaranteed to load ALL values, but statistically
* will preload approximately all (all if no concurrent mutating activity)
* Intended for use with no-eviction caches where entire maps are locally cached
*/
void preloadCache();
}
| RLocalCachedMapCache |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java | {
"start": 2134,
"end": 31768
} | class ____ {
protected static final Logger LOG = LoggerFactory.getLogger(
CryptoStreamsTestBase.class);
protected static CryptoCodec codec;
protected static final byte[] key = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
0x07, 0x08, 0x09, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16};
protected static final byte[] iv = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
protected static final int count = 10000;
protected static int defaultBufferSize = 8192;
protected static int smallBufferSize = 1024;
private byte[] data;
private int dataLen;
@BeforeEach
public void setUp() throws IOException {
// Generate data
final int seed = new Random().nextInt();
final DataOutputBuffer dataBuf = new DataOutputBuffer();
final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
for(int i = 0; i < count; ++i) {
generator.next();
final RandomDatum key = generator.getKey();
final RandomDatum value = generator.getValue();
key.write(dataBuf);
value.write(dataBuf);
}
LOG.info("Generated " + count + " records");
data = dataBuf.getData();
dataLen = dataBuf.getLength();
}
protected void writeData(OutputStream out) throws Exception {
out.write(data, 0, dataLen);
out.close();
}
protected int getDataLen() {
return dataLen;
}
private int readAll(InputStream in, byte[] b, int off, int len)
throws IOException {
int n = 0;
int total = 0;
while (n != -1) {
total += n;
if (total >= len) {
break;
}
n = in.read(b, off + total, len - total);
}
return total;
}
private int preadAll(PositionedReadable in, byte[] b, int off, int len)
throws IOException {
int n = 0;
int total = 0;
while (n != -1) {
total += n;
if (total >= len) {
break;
}
n = in.read(total, b, off + total, len - total);
}
return total;
}
private void preadCheck(PositionedReadable in) throws Exception {
byte[] result = new byte[dataLen];
int n = preadAll(in, result, 0, dataLen);
assertEquals(dataLen, n);
byte[] expectedData = new byte[n];
System.arraycopy(data, 0, expectedData, 0, n);
assertArrayEquals(result, expectedData);
}
private int byteBufferPreadAll(ByteBufferPositionedReadable in,
ByteBuffer buf) throws IOException {
int n = 0;
int total = 0;
while (n != -1) {
total += n;
if (!buf.hasRemaining()) {
break;
}
n = in.read(total, buf);
}
return total;
}
private void byteBufferPreadCheck(ByteBufferPositionedReadable in)
throws Exception {
ByteBuffer result = ByteBuffer.allocate(dataLen);
int n = byteBufferPreadAll(in, result);
assertEquals(dataLen, n);
ByteBuffer expectedData = ByteBuffer.allocate(n);
expectedData.put(data, 0, n);
assertArrayEquals(result.array(), expectedData.array());
}
protected OutputStream getOutputStream(int bufferSize) throws IOException {
return getOutputStream(bufferSize, key, iv);
}
protected abstract OutputStream getOutputStream(int bufferSize, byte[] key,
byte[] iv) throws IOException;
protected InputStream getInputStream(int bufferSize) throws IOException {
return getInputStream(bufferSize, key, iv);
}
protected abstract InputStream getInputStream(int bufferSize, byte[] key,
byte[] iv) throws IOException;
/** Test crypto reading with different buffer size. */
@Test
@Timeout(value = 120)
public void testRead() throws Exception {
OutputStream out = getOutputStream(defaultBufferSize);
writeData(out);
// Default buffer size
InputStream in = getInputStream(defaultBufferSize);
readCheck(in);
in.close();
// Small buffer size
in = getInputStream(smallBufferSize);
readCheck(in);
in.close();
}
private void readCheck(InputStream in) throws Exception {
byte[] result = new byte[dataLen];
int n = readAll(in, result, 0, dataLen);
assertEquals(dataLen, n);
byte[] expectedData = new byte[n];
System.arraycopy(data, 0, expectedData, 0, n);
assertArrayEquals(result, expectedData);
// EOF
n = in.read(result, 0, dataLen);
assertThat(n).isEqualTo(-1);
}
/** Test crypto writing with different buffer size. */
@Test
@Timeout(value = 120)
public void testWrite() throws Exception {
// Default buffer size
writeCheck(defaultBufferSize);
// Small buffer size
writeCheck(smallBufferSize);
}
private void writeCheck(int bufferSize) throws Exception {
OutputStream out = getOutputStream(bufferSize);
writeData(out);
if (out instanceof FSDataOutputStream) {
assertEquals(((FSDataOutputStream) out).getPos(), getDataLen());
}
}
/** Test crypto with different IV. */
@Test
@Timeout(value = 120)
public void testCryptoIV() throws Exception {
byte[] iv1 = iv.clone();
// Counter base: Long.MAX_VALUE
setCounterBaseForIV(iv1, Long.MAX_VALUE);
cryptoCheck(iv1);
// Counter base: Long.MAX_VALUE - 1
setCounterBaseForIV(iv1, Long.MAX_VALUE - 1);
cryptoCheck(iv1);
// Counter base: Integer.MAX_VALUE
setCounterBaseForIV(iv1, Integer.MAX_VALUE);
cryptoCheck(iv1);
// Counter base: 0
setCounterBaseForIV(iv1, 0);
cryptoCheck(iv1);
// Counter base: -1
setCounterBaseForIV(iv1, -1);
cryptoCheck(iv1);
}
private void cryptoCheck(byte[] iv) throws Exception {
OutputStream out = getOutputStream(defaultBufferSize, key, iv);
writeData(out);
InputStream in = getInputStream(defaultBufferSize, key, iv);
readCheck(in);
in.close();
}
private void setCounterBaseForIV(byte[] iv, long counterBase) {
ByteBuffer buf = ByteBuffer.wrap(iv);
buf.order(ByteOrder.BIG_ENDIAN);
buf.putLong(iv.length - 8, counterBase);
}
/**
* Test hflush/hsync of crypto output stream, and with different buffer size.
*/
@Test
@Timeout(value = 120)
public void testSyncable() throws IOException {
syncableCheck();
}
private void syncableCheck() throws IOException {
OutputStream out = getOutputStream(smallBufferSize);
try {
int bytesWritten = dataLen / 3;
out.write(data, 0, bytesWritten);
((Syncable) out).hflush();
InputStream in = getInputStream(defaultBufferSize);
verify(in, bytesWritten, data);
in.close();
out.write(data, bytesWritten, dataLen - bytesWritten);
((Syncable) out).hsync();
in = getInputStream(defaultBufferSize);
verify(in, dataLen, data);
in.close();
} finally {
out.close();
}
}
private void verify(InputStream in, int bytesToVerify,
byte[] expectedBytes) throws IOException {
final byte[] readBuf = new byte[bytesToVerify];
readAll(in, readBuf, 0, bytesToVerify);
for (int i = 0; i < bytesToVerify; i++) {
assertEquals(expectedBytes[i], readBuf[i]);
}
}
private int readAll(InputStream in, long pos, byte[] b, int off, int len)
throws IOException {
int n = 0;
int total = 0;
while (n != -1) {
total += n;
if (total >= len) {
break;
}
n = ((PositionedReadable) in).read(pos + total, b, off + total,
len - total);
}
return total;
}
private int readAll(InputStream in, long pos, ByteBuffer buf)
throws IOException {
int n = 0;
int total = 0;
while (n != -1) {
total += n;
if (!buf.hasRemaining()) {
break;
}
n = ((ByteBufferPositionedReadable) in).read(pos + total, buf);
}
return total;
}
  /**
   * Test positioned read: pread at several offsets must return the payload
   * from that offset onward (checked by {@code positionedReadCheck}).
   */
  @Test
  @Timeout(value = 120)
  public void testPositionedRead() throws Exception {
    try (OutputStream out = getOutputStream(defaultBufferSize)) {
      writeData(out);
    }
    try (InputStream in = getInputStream(defaultBufferSize)) {
      // Pos: 1/3 dataLen
      positionedReadCheck(in, dataLen / 3);
      // Pos: 1/2 dataLen
      positionedReadCheck(in, dataLen / 2);
    }
  }
private void positionedReadCheck(InputStream in, int pos) throws Exception {
byte[] result = new byte[dataLen];
int n = readAll(in, pos, result, 0, dataLen);
assertEquals(dataLen, n + pos);
byte[] readData = new byte[n];
System.arraycopy(result, 0, readData, 0, n);
byte[] expectedData = new byte[n];
System.arraycopy(data, pos, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
}
  /**
   * Test positioned read with ByteBuffers: ByteBuffer pread at several
   * offsets must return the payload from that offset onward.
   */
  @Test
  @Timeout(value = 120)
  public void testPositionedReadWithByteBuffer() throws Exception {
    try (OutputStream out = getOutputStream(defaultBufferSize)) {
      writeData(out);
    }
    try (InputStream in = getInputStream(defaultBufferSize)) {
      // Pos: 1/3 dataLen
      positionedReadCheckWithByteBuffer(in, dataLen / 3);
      // Pos: 1/2 dataLen
      positionedReadCheckWithByteBuffer(in, dataLen / 2);
    }
  }
private void positionedReadCheckWithByteBuffer(InputStream in, int pos)
throws Exception {
ByteBuffer result = ByteBuffer.allocate(dataLen);
int n = readAll(in, pos, result);
assertEquals(dataLen, n + pos);
byte[] readData = new byte[n];
System.arraycopy(result.array(), 0, readData, 0, n);
byte[] expectedData = new byte[n];
System.arraycopy(data, pos, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
}
  /**
   * Test read fully: interleaves sequential reads with positioned readFully
   * calls and verifies the sequential position is unaffected by preads.
   */
  @Test
  @Timeout(value = 120)
  public void testReadFully() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    try (InputStream in = getInputStream(defaultBufferSize)) {
      final int len1 = dataLen / 4;
      // Read len1 bytes
      byte[] readData = new byte[len1];
      readAll(in, readData, 0, len1);
      byte[] expectedData = new byte[len1];
      System.arraycopy(data, 0, expectedData, 0, len1);
      // NOTE(review): args are (actual, expected); JUnit expects
      // (expected, actual) — failure messages will read inverted.
      assertArrayEquals(readData, expectedData);
      // Pos: 1/3 dataLen
      readFullyCheck(in, dataLen / 3);
      // Read len1 bytes — sequential position must continue at len1.
      readData = new byte[len1];
      readAll(in, readData, 0, len1);
      expectedData = new byte[len1];
      System.arraycopy(data, len1, expectedData, 0, len1);
      assertArrayEquals(readData, expectedData);
      // Pos: 1/2 dataLen
      readFullyCheck(in, dataLen / 2);
      // Read len1 bytes — sequential position must continue at 2 * len1.
      readData = new byte[len1];
      readAll(in, readData, 0, len1);
      expectedData = new byte[len1];
      System.arraycopy(data, 2 * len1, expectedData, 0, len1);
      assertArrayEquals(readData, expectedData);
    }
  }
private void readFullyCheck(InputStream in, int pos) throws Exception {
byte[] result = new byte[dataLen - pos];
((PositionedReadable) in).readFully(pos, result);
byte[] expectedData = new byte[dataLen - pos];
System.arraycopy(data, pos, expectedData, 0, dataLen - pos);
assertArrayEquals(result, expectedData);
result = new byte[dataLen]; // Exceeds maximum length
try {
((PositionedReadable) in).readFully(pos, result);
fail("Read fully exceeds maximum length should fail.");
} catch (EOFException e) {
}
}
  /**
   * Test ByteBuffer read fully: interleaves sequential reads with
   * ByteBuffer readFully calls and verifies the sequential position is
   * unaffected by the positioned reads.
   */
  @Test
  @Timeout(value = 120)
  public void testByteBufferReadFully() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    try (InputStream in = getInputStream(defaultBufferSize)) {
      final int len1 = dataLen / 4;
      // Read len1 bytes
      byte[] readData = new byte[len1];
      readAll(in, readData, 0, len1);
      byte[] expectedData = new byte[len1];
      System.arraycopy(data, 0, expectedData, 0, len1);
      // NOTE(review): args are (actual, expected); JUnit expects
      // (expected, actual) — failure messages will read inverted.
      assertArrayEquals(readData, expectedData);
      // Pos: 1/3 dataLen
      byteBufferReadFullyCheck(in, dataLen / 3);
      // Read len1 bytes — sequential position must continue at len1.
      readData = new byte[len1];
      readAll(in, readData, 0, len1);
      expectedData = new byte[len1];
      System.arraycopy(data, len1, expectedData, 0, len1);
      assertArrayEquals(readData, expectedData);
      // Pos: 1/2 dataLen
      byteBufferReadFullyCheck(in, dataLen / 2);
      // Read len1 bytes — sequential position must continue at 2 * len1.
      readData = new byte[len1];
      readAll(in, readData, 0, len1);
      expectedData = new byte[len1];
      System.arraycopy(data, 2 * len1, expectedData, 0, len1);
      assertArrayEquals(readData, expectedData);
    }
  }
private void byteBufferReadFullyCheck(InputStream in, int pos)
throws Exception {
ByteBuffer result = ByteBuffer.allocate(dataLen - pos);
((ByteBufferPositionedReadable) in).readFully(pos, result);
byte[] expectedData = new byte[dataLen - pos];
System.arraycopy(data, pos, expectedData, 0, dataLen - pos);
assertArrayEquals(result.array(), expectedData);
result = ByteBuffer.allocate(dataLen); // Exceeds maximum length
try {
((ByteBufferPositionedReadable) in).readFully(pos, result);
fail("Read fully exceeds maximum length should fail.");
} catch (EOFException e) {
}
}
  /**
   * Test seek to different position: valid seeks (forward, to zero,
   * backward) succeed, while negative seeks and seeks past EOF fail
   * without moving the stream position.
   */
  @Test
  @Timeout(value = 120)
  public void testSeek() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    InputStream in = getInputStream(defaultBufferSize);
    // Pos: 1/3 dataLen
    seekCheck(in, dataLen / 3);
    // Pos: 0
    seekCheck(in, 0);
    // Pos: 1/2 dataLen
    seekCheck(in, dataLen / 2);
    final long pos = ((Seekable) in).getPos();
    // Pos: -3 — must be rejected and must not move the position.
    try {
      seekCheck(in, -3);
      fail("Seek to negative offset should fail.");
    } catch (EOFException e) {
      GenericTestUtils.assertExceptionContains(
          FSExceptionMessages.NEGATIVE_SEEK, e);
    }
    assertEquals(pos, ((Seekable) in).getPos());
    // Pos: dataLen + 3 — must be rejected and must not move the position.
    try {
      seekCheck(in, dataLen + 3);
      fail("Seek after EOF should fail.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("Cannot seek after EOF", e);
    }
    assertEquals(pos, ((Seekable) in).getPos());
    in.close();
  }
private void seekCheck(InputStream in, int pos) throws Exception {
byte[] result = new byte[dataLen];
((Seekable) in).seek(pos);
int n = readAll(in, result, 0, dataLen);
assertEquals(dataLen, n + pos);
byte[] readData = new byte[n];
System.arraycopy(result, 0, readData, 0, n);
byte[] expectedData = new byte[n];
System.arraycopy(data, pos, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
}
  /**
   * Test get position: getPos() must track the cumulative number of bytes
   * consumed by sequential reads.
   */
  @Test
  @Timeout(value = 120)
  public void testGetPos() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    // Default buffer size
    InputStream in = getInputStream(defaultBufferSize);
    byte[] result = new byte[dataLen];
    int n1 = readAll(in, result, 0, dataLen / 3);
    assertEquals(n1, ((Seekable) in).getPos());
    int n2 = readAll(in, result, n1, dataLen - n1);
    assertEquals(n1 + n2, ((Seekable) in).getPos());
    in.close();
  }
@Test
@Timeout(value = 120)
public void testAvailable() throws Exception {
OutputStream out = getOutputStream(defaultBufferSize);
writeData(out);
// Default buffer size
InputStream in = getInputStream(defaultBufferSize);
byte[] result = new byte[dataLen];
int n1 = readAll(in, result, 0, dataLen / 3);
assertEquals(in.available(), dataLen - n1);
int n2 = readAll(in, result, n1, dataLen - n1);
assertEquals(in.available(), dataLen - n1 - n2);
in.close();
}
  /**
   * Test skip: skipped bytes are excluded from subsequent reads, negative
   * skip lengths are rejected, and skipping at EOF returns zero.
   */
  @Test
  @Timeout(value = 120)
  public void testSkip() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    // Default buffer size
    InputStream in = getInputStream(defaultBufferSize);
    byte[] result = new byte[dataLen];
    int n1 = readAll(in, result, 0, dataLen / 3);
    assertEquals(n1, ((Seekable) in).getPos());
    long skipped = in.skip(dataLen / 3);
    int n2 = readAll(in, result, 0, dataLen);
    // Everything read plus everything skipped must account for the payload.
    assertEquals(dataLen, n1 + skipped + n2);
    byte[] readData = new byte[n2];
    System.arraycopy(result, 0, readData, 0, n2);
    byte[] expectedData = new byte[n2];
    System.arraycopy(data, dataLen - n2, expectedData, 0, n2);
    // NOTE(review): args are (actual, expected); JUnit expects
    // (expected, actual) — failure messages will read inverted.
    assertArrayEquals(readData, expectedData);
    try {
      skipped = in.skip(-3);
      fail("Skip Negative length should fail.");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains("Negative skip length", e);
    }
    // Skip after EOF
    skipped = in.skip(3);
    assertThat(skipped).isZero();
    in.close();
  }
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
int bufPos) throws Exception {
buf.position(bufPos);
int n = ((ByteBufferReadable) in).read(buf);
assertEquals(bufPos + n, buf.position());
byte[] readData = new byte[n];
buf.rewind();
buf.position(bufPos);
buf.get(readData);
byte[] expectedData = new byte[n];
System.arraycopy(data, 0, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
}
private void byteBufferPreadCheck(InputStream in, ByteBuffer buf,
int bufPos) throws Exception {
// Test reading from position 0
buf.position(bufPos);
int n = ((ByteBufferPositionedReadable) in).read(0, buf);
assertEquals(bufPos + n, buf.position());
byte[] readData = new byte[n];
buf.rewind();
buf.position(bufPos);
buf.get(readData);
byte[] expectedData = new byte[n];
System.arraycopy(data, 0, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
// Test reading from half way through the data
buf.position(bufPos);
n = ((ByteBufferPositionedReadable) in).read(dataLen / 2, buf);
assertEquals(bufPos + n, buf.position());
readData = new byte[n];
buf.rewind();
buf.position(bufPos);
buf.get(readData);
expectedData = new byte[n];
System.arraycopy(data, dataLen / 2, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
}
  /**
   * Test byte buffer read with different buffer size: exercises every
   * combination of {heap, direct} destination buffer, {default, small}
   * stream buffer size, and {zero, non-zero} initial buffer position.
   */
  @Test
  @Timeout(value = 120)
  public void testByteBufferRead() throws Exception {
    try (OutputStream out = getOutputStream(defaultBufferSize)) {
      writeData(out);
    }
    // Default buffer size, initial buffer position is 0
    InputStream in = getInputStream(defaultBufferSize);
    ByteBuffer buf = ByteBuffer.allocate(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();
    // Default buffer size, initial buffer position is not 0
    in = getInputStream(defaultBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();
    // Small buffer size, initial buffer position is 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 0);
    in.close();
    // Small buffer size, initial buffer position is not 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();
    // Direct buffer, default buffer size, initial buffer position is 0
    in = getInputStream(defaultBufferSize);
    buf = ByteBuffer.allocateDirect(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();
    // Direct buffer, default buffer size, initial buffer position is not 0
    in = getInputStream(defaultBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();
    // Direct buffer, small buffer size, initial buffer position is 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 0);
    in.close();
    // Direct buffer, small buffer size, initial buffer position is not 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();
  }
  /**
   * Test byte buffer pread with different buffer size: exercises every
   * combination of {heap, direct} destination buffer, {default, small}
   * stream buffer size, and {zero, non-zero} initial buffer position.
   * Pread does not move the stream position, so both streams are reused.
   */
  @Test
  @Timeout(value = 120)
  public void testByteBufferPread() throws Exception {
    try (OutputStream out = getOutputStream(defaultBufferSize)) {
      writeData(out);
    }
    try (InputStream defaultBuf = getInputStream(defaultBufferSize);
         InputStream smallBuf = getInputStream(smallBufferSize)) {
      ByteBuffer buf = ByteBuffer.allocate(dataLen + 100);
      // Default buffer size, initial buffer position is 0
      byteBufferPreadCheck(defaultBuf, buf, 0);
      // Default buffer size, initial buffer position is not 0
      buf.clear();
      byteBufferPreadCheck(defaultBuf, buf, 11);
      // Small buffer size, initial buffer position is 0
      buf.clear();
      byteBufferPreadCheck(smallBuf, buf, 0);
      // Small buffer size, initial buffer position is not 0
      buf.clear();
      byteBufferPreadCheck(smallBuf, buf, 11);
      // Test with direct ByteBuffer
      buf = ByteBuffer.allocateDirect(dataLen + 100);
      // Direct buffer, default buffer size, initial buffer position is 0
      byteBufferPreadCheck(defaultBuf, buf, 0);
      // Direct buffer, default buffer size, initial buffer position is not 0
      buf.clear();
      byteBufferPreadCheck(defaultBuf, buf, 11);
      // Direct buffer, small buffer size, initial buffer position is 0
      buf.clear();
      byteBufferPreadCheck(smallBuf, buf, 0);
      // Direct buffer, small buffer size, initial buffer position is not 0
      buf.clear();
      byteBufferPreadCheck(smallBuf, buf, 11);
    }
  }
  /**
   * Combined-operation test: interleaves sequential reads, seek, skip,
   * pread and ByteBuffer reads on one stream, verifying after each step
   * that getPos() reflects only the sequential operations (preads must not
   * move the position) and that the bytes read match the payload.
   */
  @Test
  @Timeout(value = 120)
  public void testCombinedOp() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    final int len1 = dataLen / 8;
    final int len2 = dataLen / 10;
    InputStream in = getInputStream(defaultBufferSize);
    // Read len1 data.
    byte[] readData = new byte[len1];
    readAll(in, readData, 0, len1);
    byte[] expectedData = new byte[len1];
    System.arraycopy(data, 0, expectedData, 0, len1);
    // NOTE(review): args are (actual, expected); JUnit expects
    // (expected, actual) — failure messages will read inverted.
    assertArrayEquals(readData, expectedData);
    long pos = ((Seekable) in).getPos();
    assertEquals(len1, pos);
    // Seek forward len2
    ((Seekable) in).seek(pos + len2);
    // Skip forward len2
    long n = in.skip(len2);
    assertEquals(len2, n);
    // Pos: 1/4 dataLen — pread must not disturb the sequential position.
    positionedReadCheck(in , dataLen / 4);
    // Pos should be len1 + len2 + len2
    pos = ((Seekable) in).getPos();
    assertEquals(len1 + len2 + len2, pos);
    // Read forward len1
    ByteBuffer buf = ByteBuffer.allocate(len1);
    int nRead = ((ByteBufferReadable) in).read(buf);
    assertEquals(nRead, buf.position());
    readData = new byte[nRead];
    buf.rewind();
    buf.get(readData);
    expectedData = new byte[nRead];
    System.arraycopy(data, (int)pos, expectedData, 0, nRead);
    assertArrayEquals(readData, expectedData);
    long lastPos = pos;
    // Pos should be lastPos + nRead
    pos = ((Seekable) in).getPos();
    assertEquals(lastPos + nRead, pos);
    // Pos: 1/3 dataLen — again pread must not move the position.
    positionedReadCheck(in , dataLen / 3);
    // Read forward len1
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, (int)pos, expectedData, 0, len1);
    assertArrayEquals(readData, expectedData);
    lastPos = pos;
    // Pos should be lastPos + len1
    pos = ((Seekable) in).getPos();
    assertEquals(lastPos + len1, pos);
    // Read forward len1
    buf = ByteBuffer.allocate(len1);
    nRead = ((ByteBufferReadable) in).read(buf);
    assertEquals(nRead, buf.position());
    readData = new byte[nRead];
    buf.rewind();
    buf.get(readData);
    expectedData = new byte[nRead];
    System.arraycopy(data, (int)pos, expectedData, 0, nRead);
    assertArrayEquals(readData, expectedData);
    lastPos = pos;
    // Pos should be lastPos + nRead
    pos = ((Seekable) in).getPos();
    assertEquals(lastPos + nRead, pos);
    // ByteBuffer read after EOF must return -1.
    ((Seekable) in).seek(dataLen);
    buf.clear();
    n = ((ByteBufferReadable) in).read(buf);
    assertThat(n).isEqualTo(-1);
    in.close();
  }
  /**
   * seekToNewSource() must behave like seek for valid targets and reject
   * negative offsets and offsets past EOF.
   */
  @Test
  @Timeout(value = 120)
  public void testSeekToNewSource() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    InputStream in = getInputStream(defaultBufferSize);
    // Advance the stream a little before the first check.
    final int len1 = dataLen / 8;
    byte[] readData = new byte[len1];
    readAll(in, readData, 0, len1);
    // Pos: 1/3 dataLen
    seekToNewSourceCheck(in, dataLen / 3);
    // Pos: 0
    seekToNewSourceCheck(in, 0);
    // Pos: 1/2 dataLen
    seekToNewSourceCheck(in, dataLen / 2);
    // Pos: -3 — must be rejected.
    try {
      seekToNewSourceCheck(in, -3);
      fail("Seek to negative offset should fail.");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains("Cannot seek to negative " +
          "offset", e);
    }
    // Pos: dataLen + 3 — must be rejected.
    try {
      seekToNewSourceCheck(in, dataLen + 3);
      fail("Seek after EOF should fail.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("Attempted to read past " +
          "end of file", e);
    }
    in.close();
  }
private void seekToNewSourceCheck(InputStream in, int targetPos)
throws Exception {
byte[] result = new byte[dataLen];
((Seekable) in).seekToNewSource(targetPos);
int n = readAll(in, result, 0, dataLen);
assertEquals(dataLen, n + targetPos);
byte[] readData = new byte[n];
System.arraycopy(result, 0, readData, 0, n);
byte[] expectedData = new byte[n];
System.arraycopy(data, targetPos, expectedData, 0, n);
assertArrayEquals(readData, expectedData);
}
  /**
   * Returns a trivial pool that always hands out a fresh direct buffer and
   * discards returned buffers (no actual pooling/reuse).
   */
  private ByteBufferPool getBufferPool() {
    return new ByteBufferPool() {
      @Override
      public ByteBuffer getBuffer(boolean direct, int length) {
        // Always allocates direct, regardless of the 'direct' hint.
        return ByteBuffer.allocateDirect(length);
      }
      @Override
      public void putBuffer(ByteBuffer buffer) {
        // Intentionally a no-op: buffers are never reused.
      }
    };
  }
  /**
   * Zero-copy API test: read(ByteBufferPool, ...) must return buffers whose
   * contents continue the sequential stream, interleaved with regular reads,
   * and each buffer must be released back via releaseBuffer().
   */
  @Test
  @Timeout(value = 120)
  public void testHasEnhancedByteBufferAccess() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);
    InputStream in = getInputStream(defaultBufferSize);
    final int len1 = dataLen / 8;
    // ByteBuffer size is len1
    ByteBuffer buffer = ((HasEnhancedByteBufferAccess) in).read(
        getBufferPool(), len1, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
    int n1 = buffer.remaining();
    byte[] readData = new byte[n1];
    buffer.get(readData);
    byte[] expectedData = new byte[n1];
    System.arraycopy(data, 0, expectedData, 0, n1);
    // NOTE(review): args are (actual, expected); JUnit expects
    // (expected, actual) — failure messages will read inverted.
    assertArrayEquals(readData, expectedData);
    ((HasEnhancedByteBufferAccess) in).releaseBuffer(buffer);
    // Read len1 bytes — sequential position must continue at n1.
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, n1, expectedData, 0, len1);
    assertArrayEquals(readData, expectedData);
    // ByteBuffer size is len1
    buffer = ((HasEnhancedByteBufferAccess) in).read(
        getBufferPool(), len1, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
    int n2 = buffer.remaining();
    readData = new byte[n2];
    buffer.get(readData);
    expectedData = new byte[n2];
    System.arraycopy(data, n1 + len1, expectedData, 0, n2);
    assertArrayEquals(readData, expectedData);
    ((HasEnhancedByteBufferAccess) in).releaseBuffer(buffer);
    in.close();
  }
  /**
   * Test unbuffer: after unbuffer() the stream must remain usable for
   * buffered reads (after a seek back to 0), for preads, and for ByteBuffer
   * preads, and calling unbuffer() again just before close must be safe.
   */
  @Test
  @Timeout(value = 120)
  public void testUnbuffer() throws Exception {
    // NOTE(review): 'out' is not in try-with-resources here; presumably
    // writeData() flushes/closes the stream — TODO confirm.
    OutputStream out = getOutputStream(smallBufferSize);
    writeData(out);
    // Test buffered read
    try (InputStream in = getInputStream(smallBufferSize)) {
      // Test unbuffer after buffered read
      readCheck(in);
      ((CanUnbuffer) in).unbuffer();
      if (in instanceof Seekable) {
        // Test buffered read again after unbuffer
        // Must seek to the beginning first
        ((Seekable) in).seek(0);
        readCheck(in);
      }
      // Test close after unbuffer
      ((CanUnbuffer) in).unbuffer();
      // The close will be called when exiting this try-with-resource block
    }
    // Test pread
    try (InputStream in = getInputStream(smallBufferSize)) {
      if (in instanceof PositionedReadable) {
        PositionedReadable pin = (PositionedReadable) in;
        // Test unbuffer after pread
        preadCheck(pin);
        ((CanUnbuffer) in).unbuffer();
        // Test pread again after unbuffer
        preadCheck(pin);
        // Test close after unbuffer
        ((CanUnbuffer) in).unbuffer();
        // The close will be called when exiting this try-with-resource block
      }
    }
    // Test ByteBuffer pread
    try (InputStream in = getInputStream(smallBufferSize)) {
      if (in instanceof ByteBufferPositionedReadable) {
        ByteBufferPositionedReadable bbpin = (ByteBufferPositionedReadable) in;
        // Test unbuffer after pread
        byteBufferPreadCheck(bbpin);
        ((CanUnbuffer) in).unbuffer();
        // Test pread again after unbuffer
        byteBufferPreadCheck(bbpin);
        // Test close after unbuffer
        ((CanUnbuffer) in).unbuffer();
        // The close will be called when exiting this try-with-resource block
      }
    }
  }
}
| CryptoStreamsTestBase |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java | {
"start": 2261,
"end": 2437
} | enum ____ String representation of permissions
*
* @param permission
* 3-character string representation of permission. ex: rwx
* @return Returns FsAction | for |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/json/JsonWriter.java | {
"start": 31025,
"end": 31788
} | interface ____ {
/**
* Return a new name for the JSON member or {@code null} if the member should be
* filtered entirely.
* @param path the path of the member
* @param existingName the existing and possibly already processed name.
* @return the new name
*/
@Nullable String processName(MemberPath path, String existingName);
/**
* Factory method to create a new {@link NameProcessor} for the given operation.
* @param operation the operation to apply
* @return a new {@link NameProcessor} instance
*/
static NameProcessor of(UnaryOperator<String> operation) {
Assert.notNull(operation, "'operation' must not be null");
return (path, existingName) -> operation.apply(existingName);
}
}
/**
* Callback | NameProcessor |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/references/statics/StaticsTest.java | {
"start": 706,
"end": 1850
} | class ____ {
@RegisterExtension
final GeneratedSource generatedSource = new GeneratedSource();
@ProcessorTest
@WithClasses( { BeerMapper.class, CustomMapper.class } )
public void shouldUseStaticMethod() {
Beer beer = new Beer(); // what the heck, open another one..
beer.setPercentage( 7 );
BeerDto result = BeerMapper.INSTANCE.mapBeer( beer );
assertThat( result ).isNotNull();
assertThat( result.getCategory() ).isEqualTo( Category.STRONG ); // why settle for less?
}
@ProcessorTest
@WithClasses( { BeerMapperWithNonUsedMapper.class, NonUsedMapper.class } )
public void shouldNotImportNonUsed() {
Beer beer = new Beer(); // what the heck, open another one..
beer.setPercentage( 7 );
BeerDto result = BeerMapperWithNonUsedMapper.INSTANCE.mapBeer( beer );
assertThat( result ).isNotNull();
assertThat( result.getCategory() ).isEqualTo( Category.STRONG ); // I could shurly use one now..
generatedSource.forMapper( BeerMapperWithNonUsedMapper.class ).containsNoImportFor( NonUsedMapper.class );
}
}
| StaticsTest |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java | {
"start": 3915,
"end": 4487
} | class ____ extends Plugin implements SearchPlugin {
@Override
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return singletonList(new TermVectorsFetchSubPhase());
}
@Override
public List<SearchExtSpec<?>> getSearchExts() {
return Collections.singletonList(
new SearchExtSpec<>(TermVectorsFetchSubPhase.NAME, TermVectorsFetchBuilder::new, TermVectorsFetchBuilder::fromXContent)
);
}
}
private static final | FetchTermVectorsPlugin |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/hive/parser/HiveCreateTableParser.java | {
"start": 1077,
"end": 15139
} | class ____ extends SQLCreateTableParser {
public HiveCreateTableParser(SQLExprParser exprParser) {
super(exprParser);
}
public HiveCreateTableParser(Lexer lexer) {
super(new HiveExprParser(lexer));
}
protected void createTableBefore(SQLCreateTableStatement stmt) {
if (lexer.nextIfIdentifier(FnvHash.Constants.EXTERNAL)) {
stmt.setExternal(true);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.TEMPORARY)) {
stmt.setTemporary(true);
}
if (lexer.nextIfIdentifier("TRANSACTIONAL")) {
stmt.config(SQLCreateTableStatement.Feature.Transactional);
}
}
protected void createTableBody(SQLCreateTableStatement stmt) {
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
for (; ; ) {
Token token = lexer.token();
if (token == Token.IDENTIFIER //
|| token == Token.LITERAL_ALIAS) {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
} else if (token == Token.PRIMARY //
|| token == Token.UNIQUE //
|| token == Token.CHECK //
|| token == Token.CONSTRAINT
|| token == Token.FOREIGN) {
SQLConstraint constraint = this.exprParser.parseConstraint();
constraint.setParent(stmt);
stmt.getTableElementList().add((SQLTableElement) constraint);
} else if (token == Token.TABLESPACE) {
throw new ParserException("TODO " + lexer.info());
} else {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
if (lexer.token() == Token.RPAREN) { // compatible for sql server
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
} else if (lexer.token() == Token.LIKE) {
parseLike((HiveCreateTableStatement) stmt);
}
}
protected void createTableQuery(SQLCreateTableStatement stmt) {
if (lexer.token() == Token.SELECT || lexer.token() == Token.AS) {
if (lexer.token() == Token.AS) {
lexer.nextToken();
}
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
}
protected void parseCreateTableWithSerderPropertie(HiveCreateTableStatement stmt) {
if (lexer.token() == Token.WITH) {
lexer.nextToken();
acceptIdentifier("SERDEPROPERTIES");
accept(Token.LPAREN);
for (; ; ) {
String key = lexer.stringVal();
lexer.nextToken();
accept(Token.EQ);
SQLExpr value = this.exprParser.primary();
stmt.getSerdeProperties().put(key, value);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
}
protected void parseCreateTableRest(SQLCreateTableStatement createTable) {
HiveCreateTableStatement stmt = (HiveCreateTableStatement) createTable;
if (lexer.nextIfIdentifier(FnvHash.Constants.ENGINE)) {
// skip engine=xxx
accept(Token.EQ);
lexer.nextToken();
}
if (lexer.nextIfIdentifier(FnvHash.Constants.CHARSET)) {
// skip charset = xxx
accept(Token.EQ);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.OPTIONS)) {
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
if (lexer.nextIf(Token.COMMENT)) {
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.MAPPED)) {
accept(Token.BY);
this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
}
if (lexer.nextIf(Token.PARTITIONED)) {
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
if (lexer.nextIfIdentifier(FnvHash.Constants.CLUSTERED)) {
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addClusteredByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
accept(Token.INTO);
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else {
throw new ParserException("into buckets must be integer. " + lexer.info());
}
acceptIdentifier("BUCKETS");
}
if (lexer.nextIfIdentifier(FnvHash.Constants.SKEWED)) {
accept(Token.BY);
accept(Token.LPAREN);
this.exprParser.exprList(stmt.getSkewedBy(), stmt);
accept(Token.RPAREN);
accept(Token.ON);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() == Token.LPAREN) {
SQLListExpr list = new SQLListExpr();
lexer.nextToken();
this.exprParser.exprList(list.getItems(), list);
accept(Token.RPAREN);
stmt.addSkewedByOn(list);
} else {
SQLExpr expr = this.exprParser.expr();
stmt.addSkewedByOn(expr);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
if (lexer.nextIfIdentifier(FnvHash.Constants.STORED)) {
accept(Token.AS);
acceptIdentifier("DIRECTORIES");
stmt.setSkewedByStoreAsDirectories(true);
}
}
if (lexer.token() == Token.ROW
|| lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
if (Token.LBRACKET.equals(lexer.token())) {
stmt.setLbracketUse(true);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
lexer.nextToken();
if (lexer.token() == Token.BY) {
accept(Token.BY);
SQLName name = this.exprParser.name();
stmt.setStoredBy(name);
parseCreateTableWithSerderPropertie(stmt);
} else {
accept(Token.AS);
if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
HiveInputOutputFormat format = new HiveInputOutputFormat();
lexer.nextToken();
format.setInput(this.exprParser.primary());
if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
lexer.nextToken();
format.setOutput(this.exprParser.primary());
}
stmt.setStoredAs(format);
} else {
SQLName name = this.exprParser.name();
stmt.setStoredAs(name);
}
}
}
if (Token.RBRACKET.equals(lexer.token())) {
stmt.setRbracketUse(true);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
lexer.nextToken();
SQLExpr location = this.exprParser.primary();
stmt.setLocation(location);
}
if (lexer.token() == Token.LIKE) {
parseLike(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
parseOptions(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.META)) {
lexer.nextToken();
acceptIdentifier("LIFECYCLE");
stmt.setLifeCycle(this.exprParser.primary());
}
createTableQuery(stmt);
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
Lexer.SavePoint mark = lexer.mark();
if (lexer.token() == Token.SELECT) {
stmt.setLikeQuery(true);
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
} else {
lexer.reset(mark);
if (lexer.identifierEquals(FnvHash.Constants.MAPPING)) {
SQLExpr like = this.exprParser.primary();
stmt.setLike(new SQLExprTableSource(like));
} else {
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
}
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
}
protected void parseOptions(SQLCreateTableStatement stmt) {
lexer.nextToken();
accept(Token.LPAREN);
for (; ; ) {
String name = lexer.stringVal();
lexer.nextToken();
if (lexer.token() == Token.DOT) {
lexer.nextToken();
name += "." + lexer.stringVal();
lexer.nextToken();
}
accept(Token.EQ);
SQLExpr value = this.exprParser.primary();
stmt.addOption(name, value);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
if (lexer.token() == Token.RPAREN) {
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
}
protected void parseLike(HiveCreateTableStatement stmt) {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.MAPPING)) {
SQLExpr like = this.exprParser.primary();
stmt.setLike(new SQLExprTableSource(like));
} else if (lexer.token() == Token.SELECT || lexer.token() == Token.LPAREN) {
SQLSelect select = this.createSQLSelectParser().select();
stmt.setLikeQuery(true);
stmt.setSelect(select);
} else {
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
}
protected void parseSortedBy(HiveCreateTableStatement stmt) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addSortedByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
protected void parseRowFormat(HiveCreateTableStatement stmt) {
SQLExternalRecordFormat format = this.getExprParser().parseRowFormat();
stmt.setRowFormat(format);
parseCreateTableWithSerderPropertie(stmt);
}
@Override
public HiveExprParser getExprParser() {
return (HiveExprParser) exprParser;
}
protected HiveCreateTableStatement newCreateStatement() {
return new HiveCreateTableStatement();
}
public SQLSelectParser createSQLSelectParser() {
return new HiveSelectParser(this.exprParser, selectListCache);
}
}
| HiveCreateTableParser |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/JSONLexerTest_6.java | {
"start": 794,
"end": 843
} | class ____<T> extends LinkedList<T> {
}
}
| MyList |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/AddRaftVoterResponse.java | {
"start": 1116,
"end": 2134
} | class ____ extends AbstractResponse {
private final AddRaftVoterResponseData data;
public AddRaftVoterResponse(AddRaftVoterResponseData data) {
super(ApiKeys.ADD_RAFT_VOTER);
this.data = data;
}
@Override
public AddRaftVoterResponseData data() {
return data;
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
@Override
public Map<Errors, Integer> errorCounts() {
if (data.errorCode() != Errors.NONE.code()) {
return Collections.singletonMap(Errors.forCode(data.errorCode()), 1);
} else {
return Collections.emptyMap();
}
}
public static AddRaftVoterResponse parse(Readable readable, short version) {
return new AddRaftVoterResponse(
new AddRaftVoterResponseData(readable, version));
}
}
| AddRaftVoterResponse |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlHelpStatement.java | {
"start": 794,
"end": 1207
} | class ____ extends MySqlStatementImpl {
private SQLExpr content;
public SQLExpr getContent() {
return content;
}
public void setContent(SQLExpr content) {
this.content = content;
}
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, content);
}
visitor.endVisit(this);
}
}
| MySqlHelpStatement |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/NMClientAsyncImpl.java | {
"start": 19544,
"end": 20263
} | class ____ extends ContainerEvent {
private Container container;
private boolean isIncreaseEvent;
// UpdateContainerResourceEvent constructor takes in a
// flag to support callback API's calling through the deprecated
// increaseContainerResource
public UpdateContainerResourceEvent(Container container,
boolean isIncreaseEvent) {
super(container.getId(), container.getNodeId(),
container.getContainerToken(),
ContainerEventType.UPDATE_CONTAINER_RESOURCE);
this.container = container;
this.isIncreaseEvent = isIncreaseEvent;
}
public Container getContainer() {
return container;
}
}
protected static | UpdateContainerResourceEvent |
java | quarkusio__quarkus | extensions/arc/runtime/src/main/java/io/quarkus/arc/lookup/LookupIfProperty.java | {
"start": 1090,
"end": 1667
} | class ____ {
*
* {@literal @Inject}
* Instance<Service> service;
*
* void printServiceName() {
* // This would print "bar" if the property of name "service.foo.enabled" was set to false
* // Note that service.get() would normally result in AmbiguousResolutionException
* System.out.println(service.get().name());
* }
* }
* </code>
* </pre>
*
* @see Instance
*/
@Repeatable(LookupIfProperty.List.class)
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.METHOD, ElementType.TYPE, ElementType.FIELD })
public @ | Client |
java | playframework__playframework | web/play-java-forms/src/test/scala/play/data/format/FormattersTest.java | {
"start": 2282,
"end": 2811
} | class ____ extends Formatters.SimpleFormatter<Integer> {
@Override
public Integer parse(String text, Locale locale) throws ParseException {
try {
return Integer.parseInt(text);
} catch (NumberFormatException e) {
throw new ParseException("Invalid integer (" + text + ")", 0);
}
}
@Override
public String print(Integer t, Locale locale) {
return t == null ? null : t.toString();
}
}
}
| IntegerFormatter |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/LoadablePropertiesSource.java | {
"start": 1283,
"end": 1960
} | interface ____ extends PropertiesSource {
/**
* Loads the properties from the source
*
* @return the loaded properties
*/
Properties loadProperties();
/**
* Loads the properties from the source filtering them out according to a predicate.
*
* @param filter the predicate used to filter out properties based on the key.
* @return the properties loaded.
*/
Properties loadProperties(Predicate<String> filter);
/**
* Re-loads the properties from the file location
*
* @param location the location of the properties
*/
void reloadProperties(String location);
}
| LoadablePropertiesSource |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedClusterService.java | {
"start": 1296,
"end": 3801
} | class ____ implements ManagedClusterServiceMBean {
private final CamelContext context;
private final CamelClusterService service;
public ManagedClusterService(CamelContext context, CamelClusterService service) {
this.context = context;
this.service = service;
}
public void init(ManagementStrategy strategy) {
// do nothing
}
public CamelContext getContext() {
return context;
}
public CamelClusterService getService() {
return service;
}
@Override
public void start() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
service.start();
}
@Override
public void stop() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
service.stop();
}
@Override
public String getState() {
// must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
if (service instanceof StatefulService statefulService) {
ServiceStatus status = statefulService.getStatus();
return status.name();
}
// assume started if not a ServiceSupport instance
return ServiceStatus.Started.name();
}
@Override
public String getCamelId() {
return context.getName();
}
@Override
public Collection<String> getNamespaces() {
return ClusterServiceHelper.lookupService(context)
.map(CamelClusterService::getNamespaces)
.orElseGet(Collections::emptyList);
}
@Override
public void startView(String namespace) throws Exception {
Optional<CamelClusterService> service = ClusterServiceHelper.lookupService(context);
if (service.isPresent()) {
service.get().startView(namespace);
}
}
@Override
public void stopView(String namespace) throws Exception {
Optional<CamelClusterService> service = ClusterServiceHelper.lookupService(context);
if (service.isPresent()) {
service.get().stopView(namespace);
}
}
@Override
public boolean isLeader(String namespace) {
return ClusterServiceHelper.lookupService(context)
.map(s -> s.isLeader(namespace))
.orElse(false);
}
}
| ManagedClusterService |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/QuarkusHttpHeaders.java | {
"start": 12740,
"end": 17493
} | class ____ implements Map.Entry<CharSequence, CharSequence> {
final int hash;
final CharSequence key;
CharSequence value;
QuarkusHttpHeaders.MapEntry next;
QuarkusHttpHeaders.MapEntry before, after;
MapEntry() {
this.hash = -1;
this.key = null;
this.value = null;
}
MapEntry(int hash, CharSequence key, CharSequence value) {
this.hash = hash;
this.key = key;
this.value = value;
}
void remove() {
before.after = after;
after.before = before;
}
void addBefore(QuarkusHttpHeaders.MapEntry e) {
after = e;
before = e.before;
before.after = this;
after.before = this;
}
@Override
public CharSequence getKey() {
return key;
}
@Override
public CharSequence getValue() {
return value;
}
@Override
public CharSequence setValue(CharSequence value) {
Objects.requireNonNull(value, "value");
if (!io.vertx.core.http.HttpHeaders.DISABLE_HTTP_HEADERS_VALIDATION) {
HttpUtils.validateHeaderValue(value);
}
CharSequence oldValue = this.value;
this.value = value;
return oldValue;
}
@Override
public String toString() {
return getKey() + "=" + getValue();
}
}
private void remove0(int h, int i, CharSequence name) {
QuarkusHttpHeaders.MapEntry e = entries[i];
if (e == null) {
return;
}
for (;;) {
CharSequence key = e.key;
if (e.hash == h && (name == key || AsciiString.contentEqualsIgnoreCase(name, key))) {
e.remove();
QuarkusHttpHeaders.MapEntry next = e.next;
if (next != null) {
entries[i] = next;
e = next;
} else {
entries[i] = null;
return;
}
} else {
break;
}
}
for (;;) {
QuarkusHttpHeaders.MapEntry next = e.next;
if (next == null) {
break;
}
CharSequence key = next.key;
if (next.hash == h && (name == key || AsciiString.contentEqualsIgnoreCase(name, key))) {
e.next = next.next;
next.remove();
} else {
e = next;
}
}
}
private void add0(int h, int i, final CharSequence name, final CharSequence value) {
if (!io.vertx.core.http.HttpHeaders.DISABLE_HTTP_HEADERS_VALIDATION) {
HttpUtils.validateHeader(name, value);
}
// Update the hash table.
QuarkusHttpHeaders.MapEntry e = entries[i];
QuarkusHttpHeaders.MapEntry newEntry;
entries[i] = newEntry = new QuarkusHttpHeaders.MapEntry(h, name, value);
newEntry.next = e;
// Update the linked list.
newEntry.addBefore(head);
}
private QuarkusHttpHeaders set0(final CharSequence name, final CharSequence strVal) {
int h = AsciiString.hashCode(name);
int i = h & 0x0000000F;
remove0(h, i, name);
if (strVal != null) {
add0(h, i, name, strVal);
}
return this;
}
private CharSequence get0(CharSequence name) {
int h = AsciiString.hashCode(name);
int i = h & 0x0000000F;
QuarkusHttpHeaders.MapEntry e = entries[i];
CharSequence value = null;
while (e != null) {
CharSequence key = e.key;
if (e.hash == h && (name == key || AsciiString.contentEqualsIgnoreCase(name, key))) {
value = e.getValue();
}
e = e.next;
}
return value;
}
private MultiMap set0(Iterable<Map.Entry<String, String>> map) {
clear();
for (Map.Entry<String, String> entry : map) {
add(entry.getKey(), entry.getValue());
}
return this;
}
public Map<Class<?>, Object> getContextObjects() {
if (contextObjects == null) {
return Collections.emptyMap();
}
return contextObjects;
}
public <T> QuarkusHttpHeaders setContextObject(Class<T> key, T type) {
if (contextObjects == null) {
contextObjects = new HashMap<>();
}
this.contextObjects.put(key, type);
return this;
}
public <T> T getContextObject(Class<T> key) {
return (T) getContextObjects().get(key);
}
} | MapEntry |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/SimpleOutputFormatOperatorFactory.java | {
"start": 1116,
"end": 1603
} | class ____<IN, OUT> extends SimpleOperatorFactory<OUT>
implements OutputFormatOperatorFactory<IN, OUT> {
private final OutputFormat<IN> outputFormat;
public SimpleOutputFormatOperatorFactory(
OutputFormat<IN> outputFormat, StreamOperator<OUT> operator) {
super(operator);
this.outputFormat = outputFormat;
}
@Override
public OutputFormat<IN> getOutputFormat() {
return outputFormat;
}
}
| SimpleOutputFormatOperatorFactory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/ValidatorTest.java | {
"start": 22995,
"end": 23576
} | class ____ {
@InlineMeValidationDisabled("Migrating to factory method")
@Deprecated
@InlineMe(replacement = "Client.create()", imports = "foo.Client")
public Client() {}
public static Client create() {
return new Client();
}
}
""")
.doTest();
}
@Test
public void varargsPositive() {
helper
.addSourceLines(
"Client.java",
"import com.google.errorprone.annotations.InlineMe;",
"public final | Client |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.