language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/customproviders/AnotherValidNonBlockingFiltersTest.java | {
"start": 5890,
"end": 6573
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
MultivaluedMap<String, String> headers = requestContext.getHeaders();
String previousFilterHeaderValue = headers.getFirst("filter-request");
headers.putSingle("filter-request", previousFilterHeaderValue + "/3-standard-non-blocking");
String previousThreadHeaderValue = headers.getFirst("thread");
headers.putSingle("thread", previousThreadHeaderValue + "/" + BlockingOperationSupport.isBlockingAllowed());
}
}
public static | StandardNonBlockingRequestFilter |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/runnerjar/PackageAppTestBase.java | {
"start": 7012,
"end": 8146
} | class ____ implements BiConsumer<PackageAppTestBase, BuildResult> {
@Override
public void accept(PackageAppTestBase test, BuildResult buildResult) {
test.assertBuildResult(buildResult);
}
}
protected void assertBuildResult(BuildResult result) {
}
private void assertAugmentOutcome(AugmentResult outcome) throws IOException {
final Path libDir = outcome.getJar().getLibraryDir();
assertTrue(Files.isDirectory(libDir));
final Path bootLibDir = libDir.resolve("boot");
assertTrue(Files.isDirectory(bootLibDir));
final Set<String> actualBootLib = getDirContent(bootLibDir);
final Path mainLibDir = libDir.resolve("main");
assertTrue(Files.isDirectory(mainLibDir));
final Set<String> actualMainLib = getDirContent(mainLibDir);
final Path runnerJar = outcome.getJar().getPath();
assertTrue(Files.exists(runnerJar));
try (JarFile jar = new JarFile(runnerJar.toFile())) {
final Attributes mainAttrs = jar.getManifest().getMainAttributes();
// assert the main | CustomBuildConsumer |
java | elastic__elasticsearch | modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/CommunityIdProcessor.java | {
"start": 15563,
"end": 19290
} | enum ____ {
Unknown(-1),
Icmp(1),
Igmp(2),
Tcp(6),
Udp(17),
Gre(47),
IcmpIpV6(58),
Eigrp(88),
Ospf(89),
Pim(103),
Sctp(132);
private final int transportNumber;
private static final Map<String, Type> TRANSPORT_NAMES = Map.ofEntries(
entry("icmp", Icmp),
entry("igmp", Igmp),
entry("tcp", Tcp),
entry("udp", Udp),
entry("gre", Gre),
entry("ipv6-icmp", IcmpIpV6),
entry("icmpv6", IcmpIpV6),
entry("eigrp", Eigrp),
entry("ospf", Ospf),
entry("pim", Pim),
entry("sctp", Sctp)
);
Type(int transportNumber) {
this.transportNumber = transportNumber;
}
public int getTransportNumber() {
return transportNumber;
}
}
private final Type type;
private final int transportNumber;
private Transport(int transportNumber, Type type) {
this.transportNumber = transportNumber;
this.type = type;
}
private Transport(Type type) {
this.transportNumber = type.getTransportNumber();
this.type = type;
}
public Type getType() {
return this.type;
}
public int getTransportNumber() {
return transportNumber;
}
// visible for testing
static Transport fromNumber(int transportNumber) {
if (transportNumber < 0 || transportNumber >= 255) {
// transport numbers range https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml
throw new IllegalArgumentException("invalid transport protocol number [" + transportNumber + "]");
}
Type type = switch (transportNumber) {
case 1 -> Type.Icmp;
case 2 -> Type.Igmp;
case 6 -> Type.Tcp;
case 17 -> Type.Udp;
case 47 -> Type.Gre;
case 58 -> Type.IcmpIpV6;
case 88 -> Type.Eigrp;
case 89 -> Type.Ospf;
case 103 -> Type.Pim;
case 132 -> Type.Sctp;
default -> Type.Unknown;
};
return new Transport(transportNumber, type);
}
private static Transport fromObject(Object o) {
if (o instanceof Number number) {
return fromNumber(number.intValue());
} else if (o instanceof String protocolStr) {
// check if matches protocol name
if (Type.TRANSPORT_NAMES.containsKey(protocolStr.toLowerCase(Locale.ROOT))) {
return new Transport(Type.TRANSPORT_NAMES.get(protocolStr.toLowerCase(Locale.ROOT)));
}
// check if convertible to protocol number
try {
int protocolNumber = Integer.parseInt(protocolStr);
return fromNumber(protocolNumber);
} catch (NumberFormatException e) {
// fall through to IllegalArgumentException
}
throw new IllegalArgumentException("could not convert string [" + protocolStr + "] to transport protocol");
} else {
throw new IllegalArgumentException(
"could not convert value of type [" + o.getClass().getName() + "] to transport protocol"
);
}
}
}
public | Type |
java | apache__logging-log4j2 | log4j-jpa/src/main/java/org/apache/logging/log4j/core/appender/db/jpa/AbstractLogEventWrapperEntity.java | {
"start": 11977,
"end": 12120
} | class ____ extends AbstractLogEvent {
private static final long serialVersionUID = 1L;
// Inherits everything
}
}
| NullLogEvent |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanInstanceSupplierTests.java | {
"start": 37653,
"end": 37745
} | interface ____ {
default String test() {
return "Test";
}
}
static | MethodOnInterface |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/StackTraceChecker.java | {
"start": 208,
"end": 1410
} | class ____ implements Predicate<Class<?>> {
@Override
public boolean test(Class<?> type) {
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
for (int index = 1; index < stackTrace.length - 1; index++) {
if (!stackTrace[index].getClassName().startsWith("org.mockito.internal.")) {
if (stackTrace[index + 1].getMethodName().startsWith("<init>")) {
try {
if (!stackTrace[index + 1].getClassName().equals(type.getName())
&& type.isAssignableFrom(
Class.forName(
stackTrace[index + 1].getClassName(),
false,
type.getClassLoader()))) {
return true;
} else {
break;
}
} catch (ClassNotFoundException ignored) {
break;
}
}
}
}
return false;
}
}
| StackTraceChecker |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/TopicCommand.java | {
"start": 11964,
"end": 13430
} | class ____ {
private final String topic;
private final Uuid topicId;
private final int numPartitions;
private final int replicationFactor;
private final Config config;
private final boolean markedForDeletion;
public TopicDescription(String topic, Uuid topicId, int numPartitions, int replicationFactor, Config config, boolean markedForDeletion) {
this.topic = topic;
this.topicId = topicId;
this.numPartitions = numPartitions;
this.replicationFactor = replicationFactor;
this.config = config;
this.markedForDeletion = markedForDeletion;
}
public void printDescription() {
String configsAsString = config.entries().stream()
.filter(config -> !config.isDefault())
.map(ce -> ce.name() + "=" + ce.value())
.collect(Collectors.joining(","));
System.out.print("Topic: " + topic);
if (!topicId.equals(Uuid.ZERO_UUID))
System.out.print("\tTopicId: " + topicId);
System.out.print("\tPartitionCount: " + numPartitions);
System.out.print("\tReplicationFactor: " + replicationFactor);
System.out.print("\tConfigs: " + configsAsString);
System.out.print(markedForDeletion ? "\tMarkedForDeletion: true" : "");
System.out.println();
}
}
static | TopicDescription |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/fielddata/FloatDocValuesFieldTests.java | {
"start": 881,
"end": 6312
} | class ____ extends ESTestCase {
public void testFloatField() throws IOException {
double[][] values = generate(ESTestCase::randomFloat);
FloatDocValuesField floatField = new FloatDocValuesField(wrap(values), "test");
for (int round = 0; round < 10; round++) {
int d = between(0, values.length - 1);
floatField.setNextDocId(d);
if (values[d].length > 0) {
assertEquals(values[d][0], floatField.get(Float.MIN_VALUE), 0.0);
assertEquals(values[d][0], floatField.get(0, Float.MIN_VALUE), 0.0);
}
assertEquals(values[d].length, floatField.size());
for (int i = 0; i < values[d].length; i++) {
assertEquals(values[d][i], floatField.get(i, Float.MIN_VALUE), 0.0);
}
int i = 0;
for (float f : floatField) {
assertEquals(values[d][i++], f, 0.0);
}
}
}
public void testDoubleField() throws IOException {
double[][] values = generate(ESTestCase::randomFloat);
DoubleDocValuesField doubleField = new DoubleDocValuesField(wrap(values), "test");
for (int round = 0; round < 10; round++) {
int d = between(0, values.length - 1);
doubleField.setNextDocId(d);
if (values[d].length > 0) {
assertEquals(values[d][0], doubleField.get(Float.MIN_VALUE), 0.0);
assertEquals(values[d][0], doubleField.get(0, Float.MIN_VALUE), 0.0);
}
assertEquals(values[d].length, doubleField.size());
for (int i = 0; i < values[d].length; i++) {
assertEquals(values[d][i], doubleField.get(i, Float.MIN_VALUE), 0.0);
}
int i = 0;
for (double dbl : doubleField) {
assertEquals(values[d][i++], dbl, 0.0);
}
}
}
public void testScaledFloatField() throws IOException {
double[][] values = generate(ESTestCase::randomDouble);
ScaledFloatDocValuesField scaledFloatField = new ScaledFloatDocValuesField(wrap(values), "test");
for (int round = 0; round < 10; round++) {
int d = between(0, values.length - 1);
scaledFloatField.setNextDocId(d);
if (values[d].length > 0) {
assertEquals(values[d][0], scaledFloatField.get(Double.MIN_VALUE), 0.0);
assertEquals(values[d][0], scaledFloatField.get(0, Double.MIN_VALUE), 0.0);
}
assertEquals(values[d].length, scaledFloatField.size());
for (int i = 0; i < values[d].length; i++) {
assertEquals(values[d][i], scaledFloatField.get(i, Double.MIN_VALUE), 0.0);
}
int i = 0;
for (double dbl : scaledFloatField) {
assertEquals(values[d][i++], dbl, 0.0);
}
}
}
public void testHalfFloatField() throws IOException {
double[][] values = generate(ESTestCase::randomDouble);
HalfFloatDocValuesField halfFloatField = new HalfFloatDocValuesField(wrap(values), "test");
for (int round = 0; round < 10; round++) {
int d = between(0, values.length - 1);
halfFloatField.setNextDocId(d);
if (values[d].length > 0) {
assertEquals((float) values[d][0], halfFloatField.get(Float.MIN_VALUE), 0.0f);
assertEquals((float) values[d][0], halfFloatField.get(0, Float.MIN_VALUE), 0.0f);
assertEquals(values[d][0], halfFloatField.asDouble(Double.MIN_VALUE), 0.0);
assertEquals(values[d][0], halfFloatField.asDouble(0, Double.MIN_VALUE), 0.0);
}
assertEquals(values[d].length, halfFloatField.size());
for (int i = 0; i < values[d].length; i++) {
assertEquals((float) values[d][i], halfFloatField.get(i, Float.MIN_VALUE), 0.0f);
assertEquals(values[d][i], halfFloatField.asDouble(i, Double.MIN_VALUE), 0.0);
}
int i = 0;
for (float flt : halfFloatField) {
assertEquals((float) values[d][i++], flt, 0.0f);
}
i = 0;
for (double dbl : halfFloatField.asDoubles()) {
assertEquals(values[d][i++], dbl, 0.0);
}
}
}
protected double[][] generate(DoubleSupplier supplier) {
double[][] values = new double[between(3, 10)][];
for (int d = 0; d < values.length; d++) {
values[d] = new double[randomBoolean() ? randomBoolean() ? 0 : 1 : between(2, 100)];
for (int i = 0; i < values[d].length; i++) {
values[d][i] = supplier.getAsDouble();
}
}
return values;
}
protected SortedNumericDoubleValues wrap(double[][] values) {
return new SortedNumericDoubleValues() {
double[] current;
int i;
@Override
public boolean advanceExact(int doc) {
i = 0;
current = values[doc];
return current.length > 0;
}
@Override
public int docValueCount() {
return current.length;
}
@Override
public double nextValue() {
return current[i++];
}
};
}
}
| FloatDocValuesFieldTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Artifact.java | {
"start": 1831,
"end": 4023
} | enum ____ {
DOCKER("DOCKER"), TARBALL("TARBALL"), SERVICE("SERVICE");
private String value;
TypeEnum(String value) {
this.value = value;
}
@Override
@JsonValue
public String toString() {
return value;
}
}
private TypeEnum type = TypeEnum.DOCKER;
private String uri = null;
/**
* Artifact id. Examples are package location uri for tarball based services,
* image name for docker, etc.
**/
public Artifact id(String id) {
this.id = id;
return this;
}
@ApiModelProperty(example = "null", required = true, value = "Artifact id. Examples are package location uri for tarball based services, image name for docker, etc.")
@JsonProperty("id")
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
/**
* Artifact type, like docker, tarball, etc. (optional).
**/
public Artifact type(TypeEnum type) {
this.type = type;
return this;
}
@ApiModelProperty(example = "null", value = "Artifact type, like docker, tarball, etc. (optional).")
@JsonProperty("type")
public TypeEnum getType() {
return type;
}
public void setType(TypeEnum type) {
this.type = type;
}
/**
* Artifact location to support multiple artifact stores (optional).
**/
public Artifact uri(String uri) {
this.uri = uri;
return this;
}
@ApiModelProperty(example = "null", value = "Artifact location to support multiple artifact stores (optional).")
@JsonProperty("uri")
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Artifact artifact = (Artifact) o;
return Objects.equals(this.id, artifact.id)
&& Objects.equals(this.type, artifact.type)
&& Objects.equals(this.uri, artifact.uri);
}
@Override
public int hashCode() {
return Objects.hash(id, type, uri);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(" | TypeEnum |
java | apache__flink | flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/writer/GSRecoverableFsDataOutputStream.java | {
"start": 1572,
"end": 9859
} | class ____ extends RecoverableFsDataOutputStream {
private static final Logger LOGGER =
LoggerFactory.getLogger(GSRecoverableFsDataOutputStream.class);
/** The underlying blob storage. */
private final GSBlobStorage storage;
/** The GS file system options. */
private final GSFileSystemOptions options;
/** The blob id to which the recoverable write operation is writing. */
private final GSBlobIdentifier finalBlobIdentifier;
/** The write position, i.e. number of bytes that have been written so far. */
private long position;
/** Indicates if the write has been closed. */
private boolean closed;
/** The object ids for the temporary objects that should be composed to form the final blob. */
private final ArrayList<UUID> componentObjectIds;
/**
* The current write channel, if one exists. A channel is created when one doesn't exist and
* bytes are written, and the channel is closed/destroyed when explicitly closed by the consumer
* (via close or closeForCommit) or when the data output stream is persisted (via persist).
* Calling persist does not close the data output stream, so it's possible that more bytes will
* be written, which will cause another channel to be created. So, multiple write channels may
* be created and destroyed during the lifetime of the data output stream.
*/
@Nullable private GSChecksumWriteChannel currentWriteChannel;
/**
* Constructs a new, initially empty output stream.
*
* @param storage The storage implementation
* @param options The file system options
* @param finalBlobIdentifier The final blob identifier to which to write
*/
GSRecoverableFsDataOutputStream(
GSBlobStorage storage,
GSFileSystemOptions options,
GSBlobIdentifier finalBlobIdentifier) {
LOGGER.debug(
"Creating new GSRecoverableFsDataOutputStream for blob {} with options {}",
finalBlobIdentifier,
options);
this.storage = Preconditions.checkNotNull(storage);
this.options = Preconditions.checkNotNull(options);
this.finalBlobIdentifier = Preconditions.checkNotNull(finalBlobIdentifier);
this.position = 0;
this.closed = false;
this.componentObjectIds = new ArrayList<>();
}
/**
* Constructs an output stream from a recoverable.
*
* @param storage The storage implementation
* @param options The file system options
* @param recoverable The recoverable
*/
GSRecoverableFsDataOutputStream(
GSBlobStorage storage, GSFileSystemOptions options, GSResumeRecoverable recoverable) {
LOGGER.debug(
"Recovering GSRecoverableFsDataOutputStream for blob {} with options {}",
recoverable.finalBlobIdentifier,
options);
this.storage = Preconditions.checkNotNull(storage);
this.options = Preconditions.checkNotNull(options);
this.finalBlobIdentifier = Preconditions.checkNotNull(recoverable.finalBlobIdentifier);
Preconditions.checkArgument(recoverable.position >= 0);
this.position = recoverable.position;
this.closed = recoverable.closed;
this.componentObjectIds = new ArrayList<>(recoverable.componentObjectIds);
}
@Override
public long getPos() throws IOException {
return position;
}
@Override
public void write(int byteValue) throws IOException {
byte[] bytes = new byte[] {(byte) byteValue};
write(bytes);
}
@Override
public void write(@Nonnull byte[] content) throws IOException {
Preconditions.checkNotNull(content);
write(content, 0, content.length);
}
@Override
public void write(@Nonnull byte[] content, int start, int length) throws IOException {
Preconditions.checkNotNull(content);
Preconditions.checkArgument(start >= 0);
Preconditions.checkArgument(length >= 0);
// if the data stream is already closed, throw an exception
if (closed) {
throw new IOException("Illegal attempt to write to closed output stream");
}
// if necessary, create a write channel
if (currentWriteChannel == null) {
LOGGER.debug("Creating write channel for blob {}", finalBlobIdentifier);
currentWriteChannel = createWriteChannel();
}
// write to the stream. the docs say that, in some circumstances, though an attempt will be
// made to write all of the requested bytes, there are some cases where only some bytes will
// be written. it's not clear whether this could ever happen with a Google storage
// WriteChannel; in any case, recoverable writers don't support partial writes, so if this
// ever happens, we must fail the write.:
// https://docs.oracle.com/javase/7/docs/api/java/nio/channels/WritableByteChannel.html#write(java.nio.ByteBuffer)
LOGGER.trace("Writing {} bytes", length);
int bytesWritten = currentWriteChannel.write(content, start, length);
if (bytesWritten != length) {
throw new IOException(
String.format(
"WriteChannel.write wrote %d of %d requested bytes, failing.",
bytesWritten, length));
}
// update count of total bytes written
position += bytesWritten;
}
@Override
public void flush() throws IOException {
LOGGER.trace("Flushing write channel for blob {}", finalBlobIdentifier);
closeWriteChannelIfExists();
}
@Override
public void sync() throws IOException {
LOGGER.trace("Syncing write channel for blob {}", finalBlobIdentifier);
closeWriteChannelIfExists();
}
@Override
public RecoverableWriter.ResumeRecoverable persist() throws IOException {
LOGGER.trace("Persisting write channel for blob {}", finalBlobIdentifier);
closeWriteChannelIfExists();
return createResumeRecoverable();
}
@Override
public void close() throws IOException {
LOGGER.trace("Closing write channel for blob {}", finalBlobIdentifier);
closeWriteChannelIfExists();
closed = true;
}
@Override
public Committer closeForCommit() throws IOException {
LOGGER.trace("Closing write channel for commit for blob {}", finalBlobIdentifier);
close();
return new GSRecoverableWriterCommitter(storage, options, createCommitRecoverable());
}
private GSCommitRecoverable createCommitRecoverable() {
return new GSCommitRecoverable(finalBlobIdentifier, componentObjectIds);
}
private GSResumeRecoverable createResumeRecoverable() {
return new GSResumeRecoverable(finalBlobIdentifier, componentObjectIds, position, closed);
}
private GSChecksumWriteChannel createWriteChannel() {
// add a new component blob id for the new channel to write to
UUID componentObjectId = UUID.randomUUID();
componentObjectIds.add(componentObjectId);
GSBlobIdentifier blobIdentifier =
BlobUtils.getTemporaryBlobIdentifier(
finalBlobIdentifier, componentObjectId, options);
// create the channel, using an explicit chunk size if specified in options
Optional<MemorySize> writerChunkSize = options.getWriterChunkSize();
GSBlobStorage.WriteChannel writeChannel =
writerChunkSize.isPresent()
? storage.writeBlob(blobIdentifier, writerChunkSize.get())
: storage.writeBlob(blobIdentifier);
return new GSChecksumWriteChannel(storage, writeChannel, blobIdentifier);
}
private void closeWriteChannelIfExists() throws IOException {
if (currentWriteChannel != null) {
// close the channel, this causes all written data to be committed.
// note that this also validates checksums and will throw an exception
// if they don't match
currentWriteChannel.close();
currentWriteChannel = null;
}
}
}
| GSRecoverableFsDataOutputStream |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/LambdaFunctionalInterfaceTest.java | {
"start": 6264,
"end": 7241
} | class ____ {
public double fooIntToDoubleFunction(int x, Function<Integer, Double> fn) {
return fn.apply(x).doubleValue();
}
public void fooIntToDoubleUtil(int y, IntToDoubleFunction fn) {
fn.applyAsDouble(y);
}
public long fooIntToLongFunction(int x, Function<Integer, Long> fn) {
return fn.apply(x);
}
public long fooIntToIntFunction(int x, Function<Integer, Long> fn) {
return fn.apply(x);
}
public double fooDoubleToDoubleFunction(double x, Function<Double, Double> fn) {
return fn.apply(x);
}
public int fooDoubleToIntFunction(double x, Function<Double, Integer> fn) {
return fn.apply(x);
}
public String add(String string, Function<String, String> func) {
return func.apply(string);
}
public void fooInterface(String str, Function<Integer, Double> func) {}
public double fooDouble(double x, Function<Double, Integer> fn) {
return fn.apply(x);
}
public static | LambdaFunctionalInterfaceNegativeCases |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FunctionalInterfaceMethodChangedTest.java | {
"start": 3158,
"end": 3293
} | interface ____ {
String superSam();
}
@FunctionalInterface
| ValueReturningSuperFI |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/AbstractDeleteMethodTest.java | {
"start": 164,
"end": 463
} | class ____ {
@Test
void shouldNotDeleteNonExistentObject() {
when().delete("/items/100")
.then().statusCode(404);
}
@Test
void shouldDeleteObject() {
when().delete("/items/1")
.then().statusCode(204);
}
}
| AbstractDeleteMethodTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportResourceTests.java | {
"start": 7038,
"end": 7274
} | class ____
extends org.springframework.beans.factory.support.PropertiesBeanDefinitionReader {
PrivatePropertiesBeanDefinitionReader(BeanDefinitionRegistry registry) {
super(registry);
}
}
}
| PrivatePropertiesBeanDefinitionReader |
java | playframework__playframework | persistence/play-java-jpa/src/test/java/play/db/jpa/JPAApiTest.java | {
"start": 615,
"end": 7936
} | class ____ {
@Rule public TestDatabase db = new TestDatabase();
private Set<String> getConfiguredPersistenceUnitNames(String configString) {
Config overrides = ConfigFactory.parseString(configString);
Config config = overrides.withFallback(ConfigFactory.load());
return new JPAConfigProvider(config)
.get().persistenceUnits().stream().map(unit -> unit.unitName).collect(Collectors.toSet());
}
@Test
public void shouldWorkWithEmptyConfiguration() {
String configString = "";
Set<String> unitNames = getConfiguredPersistenceUnitNames(configString);
assertThat(unitNames).isEmpty();
}
@Test
public void shouldWorkWithSingleValue() {
String configString = "jpa.default = defaultPersistenceUnit";
Set<String> unitNames = getConfiguredPersistenceUnitNames(configString);
assertThat(unitNames).containsOnly("defaultPersistenceUnit");
}
@Test
public void shouldWorkWithMultipleValues() {
String configString = "jpa.default = defaultPersistenceUnit\n" + "jpa.number2 = number2Unit";
Set<String> unitNames = getConfiguredPersistenceUnitNames(configString);
assertThat(unitNames).containsOnly("defaultPersistenceUnit", "number2Unit");
}
@Test
public void shouldWorkWithEmptyConfigurationAtConfiguredLocation() {
String configString = "play.jpa.config = myconfig.jpa";
Set<String> unitNames = getConfiguredPersistenceUnitNames(configString);
assertThat(unitNames).isEmpty();
}
@Test
public void shouldWorkWithSingleValueAtConfiguredLocation() {
String configString =
"play.jpa.config = myconfig.jpa\n" + "myconfig.jpa.default = defaultPersistenceUnit";
Set<String> unitNames = getConfiguredPersistenceUnitNames(configString);
assertThat(unitNames).containsOnly("defaultPersistenceUnit");
}
@Test
public void shouldWorkWithMultipleValuesAtConfiguredLocation() {
String configString =
"play.jpa.config = myconfig.jpa\n"
+ "myconfig.jpa.default = defaultPersistenceUnit\n"
+ "myconfig.jpa.number2 = number2Unit";
Set<String> unitNames = getConfiguredPersistenceUnitNames(configString);
assertThat(unitNames).containsOnly("defaultPersistenceUnit", "number2Unit");
}
@Test
public void shouldBeAbleToGetAnEntityManagerWithAGivenName() {
EntityManager em = db.jpa.em("default");
assertThat(em).isNotNull();
}
@Test
public void shouldExecuteAFunctionBlockUsingAEntityManager() {
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = createTestEntity();
entityManager.persist(entity);
return entity;
});
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = TestEntity.find(1L, entityManager);
assertThat(entity.name).isEqualTo("alice");
});
}
@Test
public void shouldExecuteAFunctionBlockUsingASpecificNamedEntityManager() {
db.jpa.withTransaction(
"default",
entityManager -> {
TestEntity entity = createTestEntity();
entityManager.persist(entity);
return entity;
});
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = TestEntity.find(1L, entityManager);
assertThat(entity.name).isEqualTo("alice");
});
}
@Test
public void shouldExecuteAFunctionBlockAsAReadOnlyTransaction() {
db.jpa.withTransaction(
"default",
true,
entityManager -> {
TestEntity entity = createTestEntity();
entityManager.persist(entity);
return entity;
});
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = TestEntity.find(1L, entityManager);
assertThat(entity).isNull();
});
}
private TestEntity createTestEntity() {
return createTestEntity(1L);
}
private TestEntity createTestEntity(Long id) {
TestEntity entity = new TestEntity();
entity.id = id;
entity.name = "alice";
return entity;
}
@Test
public void shouldExecuteASupplierBlockInsideATransaction() throws Exception {
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = createTestEntity();
entity.save(entityManager);
});
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = TestEntity.find(1L, entityManager);
assertThat(entity.name).isEqualTo("alice");
});
}
@Test
public void shouldNestTransactions() {
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = new TestEntity();
entity.id = 2L;
entity.name = "test2";
entity.save(entityManager);
db.jpa.withTransaction(
entityManagerInner -> {
TestEntity entity2 = TestEntity.find(2L, entityManagerInner);
assertThat(entity2).isNull();
});
// Verify that we can still access the EntityManager
TestEntity entity3 = TestEntity.find(2L, entityManager);
assertThat(entity3).isEqualTo(entity);
});
}
@Test
public void shouldRollbackInnerTransactionOnly() {
db.jpa.withTransaction(
entityManager -> {
// Parent transaction creates entity 2
TestEntity entity = createTestEntity(2L);
entity.save(entityManager);
db.jpa.withTransaction(
entityManagerInner -> {
// Nested transaction creates entity 3, but rolls back
TestEntity entity2 = createTestEntity(3L);
entity2.save(entityManagerInner);
entityManagerInner.getTransaction().setRollbackOnly();
});
// Verify that we can still access the EntityManager
TestEntity entity3 = TestEntity.find(2L, entityManager);
assertThat(entity3).isEqualTo(entity);
});
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = TestEntity.find(3L, entityManager);
assertThat(entity).isNull();
TestEntity entity2 = TestEntity.find(2L, entityManager);
assertThat(entity2.name).isEqualTo("alice");
});
}
@Test
public void shouldRollbackOuterTransactionOnly() {
db.jpa.withTransaction(
entityManager -> {
// Parent transaction creates entity 2, but rolls back
TestEntity entity = createTestEntity(2L);
entity.save(entityManager);
db.jpa.withTransaction(
entityManagerInner -> {
// Nested transaction creates entity 3
TestEntity entity2 = createTestEntity(3L);
entity2.save(entityManagerInner);
});
// Verify that we can still access the EntityManager
TestEntity entity3 = TestEntity.find(2L, entityManager);
assertThat(entity3).isEqualTo(entity);
entityManager.getTransaction().setRollbackOnly();
});
db.jpa.withTransaction(
entityManager -> {
TestEntity entity = TestEntity.find(3L, entityManager);
assertThat(entity.name).isEqualTo("alice");
TestEntity entity2 = TestEntity.find(2L, entityManager);
assertThat(entity2).isNull();
});
}
public static | JPAApiTest |
java | quarkusio__quarkus | devtools/cli/src/main/java/io/quarkus/cli/create/ExtensionCodeGenerationGroup.java | {
"start": 96,
"end": 1761
} | class ____ {
@CommandLine.Option(names = { "-C",
"--codestart" }, description = "Generate extension codestart", negatable = true)
boolean codestart = false;
@CommandLine.Option(names = { "--no-unit-test" }, description = "Generate unit tests", negatable = true)
boolean unitTest = true;
@CommandLine.Option(names = {
"--no-it-test" }, description = "Generate integration test", negatable = true)
boolean integrationTests = true;
@CommandLine.Option(names = {
"--no-devmode-test" }, description = "Generate dev mode tests", negatable = true)
boolean devModeTest = true;
@CommandLine.Option(names = {
"--without-tests" }, description = "Do not generate any tests (disable all)")
Optional<Boolean> withoutTests;
public boolean withCodestart() {
return codestart;
}
public boolean skipUnitTest() {
return withoutTests.orElse(!unitTest);
}
public boolean skipIntegrationTests() {
return withoutTests.orElse(!integrationTests);
}
public boolean skipDevModeTest() {
return withoutTests.orElse(!devModeTest);
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ExtensionCodeGenerationGroup{");
sb.append("codestart=").append(codestart);
sb.append(", unitTest=").append(unitTest);
sb.append(", integrationTests=").append(integrationTests);
sb.append(", devModeTest=").append(devModeTest);
sb.append(", withoutTests=").append(withoutTests);
sb.append('}');
return sb.toString();
}
}
| ExtensionCodeGenerationGroup |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamPhysicalMultiJoin.java | {
"start": 3175,
"end": 18261
} | class ____ extends AbstractRelNode implements StreamPhysicalRel {
private List<RelNode> inputs;
private final RexNode joinFilter;
private final List<JoinRelType> joinTypes;
private final List<? extends @Nullable RexNode> joinConditions;
private final JoinKeyExtractor keyExtractor;
/**
* A map from input index to a list of {@link ConditionAttributeRef}. Each {@link
* ConditionAttributeRef} represents a join condition associated with the input.
*/
private final Map<Integer, List<ConditionAttributeRef>> joinAttributeMap;
private final @Nullable RexNode postJoinFilter;
private final List<RelHint> hints;
// Cached derived properties to avoid recomputation
private @Nullable RexNode multiJoinCondition;
private @Nullable List<List<int[]>> inputUniqueKeys;
/**
 * Creates a stream physical multi-join node.
 *
 * <p>The derived properties ({@code multiJoinCondition}, {@code inputUniqueKeys}) are computed
 * eagerly here and cached; {@link #replaceInput} resets the caches.
 */
public StreamPhysicalMultiJoin(
        final RelOptCluster cluster,
        final RelTraitSet traitSet,
        final List<RelNode> inputs,
        final RexNode joinFilter,
        final RelDataType rowType,
        final List<? extends @Nullable RexNode> joinConditions,
        final List<JoinRelType> joinTypes,
        final Map<Integer, List<ConditionAttributeRef>> joinAttributeMap,
        final @Nullable RexNode postJoinFilter,
        final List<RelHint> hints,
        final JoinKeyExtractor keyExtractor) {
    super(cluster, traitSet);
    // NOTE(review): `inputs` is stored without a defensive copy, while replaceInput()
    // swaps in an immutable copy — confirm callers do not mutate the list afterwards.
    this.inputs = inputs;
    this.rowType = rowType;
    this.joinFilter = joinFilter;
    this.joinTypes = joinTypes;
    this.joinConditions = joinConditions;
    this.joinAttributeMap = joinAttributeMap;
    this.postJoinFilter = postJoinFilter;
    this.hints = hints;
    this.keyExtractor = keyExtractor;
    // Eagerly populate the lazy caches (the getters compute on first access).
    this.multiJoinCondition = getMultiJoinCondition();
    this.inputUniqueKeys = getUniqueKeysForInputs();
}
@Override
public boolean requireWatermark() {
    // This node never requires watermarks from its inputs.
    return false;
}
@Override
public List<RelNode> getInputs() {
    // Exposes the internal list directly; replaceInput() swaps in an immutable copy,
    // so the reference returned here may become stale after a replacement.
    return inputs;
}
@Override
public void replaceInput(final int ordinalInParent, final RelNode p) {
    // Validate explicitly instead of relying on `assert`, which is a no-op unless the
    // JVM runs with -ea; an out-of-range ordinal must fail fast either way.
    if (ordinalInParent < 0 || ordinalInParent >= inputs.size()) {
        throw new IndexOutOfBoundsException(
                "Input ordinal " + ordinalInParent + " out of range [0, " + inputs.size() + ")");
    }
    final List<RelNode> newInputs = new ArrayList<>(inputs);
    newInputs.set(ordinalInParent, p);
    // Store an immutable snapshot so later mutations of newInputs cannot leak in.
    this.inputs = List.copyOf(newInputs);
    // Invalidate cached derived properties since inputs changed
    this.multiJoinCondition = null;
    this.inputUniqueKeys = null;
    recomputeDigest();
}
@Override
public RelNode copy(final RelTraitSet traitSet, final List<RelNode> inputs) {
    // A fresh node recomputes its cached derived properties in the constructor;
    // all join metadata (conditions, types, hints, key extractor) is shared.
    return new StreamPhysicalMultiJoin(
            getCluster(),
            traitSet,
            inputs,
            joinFilter,
            getRowType(),
            joinConditions,
            joinTypes,
            joinAttributeMap,
            postJoinFilter,
            hints,
            keyExtractor);
}
@Override
public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadataQuery mq) {
    // Simple heuristic: cost grows linearly with the number of inputs (100 per input),
    // with zero I/O cost.
    final double elementRate = 100.0d * getInputs().size();
    return planner.getCostFactory().makeCost(elementRate, elementRate, 0);
}
@Override
public RelWriter explainTerms(final RelWriter pw) {
    super.explainTerms(pw);
    // Register every input so the plan printer can render the tree structure.
    for (final Ord<RelNode> ord : Ord.zip(inputs)) {
        pw.input("input#" + ord.i, ord.e);
    }
    // Render join metadata; optional items (hints, post-join filter) are omitted when absent.
    return pw.item("commonJoinKey", getCommonJoinKeyFieldNames())
            .item("joinTypes", formatJoinTypes())
            .item("inputUniqueKeys", formatInputUniqueKeysWithFieldNames())
            .itemIf("stateTtlHints", RelExplainUtil.hintsToString(hints), !hints.isEmpty())
            .item("joinConditions", formatJoinConditionsWithFieldNames(pw))
            .itemIf(
                    "postJoinFilter",
                    formatExpressionWithFieldNames(postJoinFilter, pw),
                    postJoinFilter != null)
            .item("select", String.join(",", getRowType().getFieldNames()))
            .item("rowType", getRowType());
}
@Override
protected RelDataType deriveRowType() {
    // The row type is fixed at construction time; nothing to derive.
    return rowType;
}
@Override
public ExecNode<?> translateToExecNode() {
    // Materialize the lazily-cached derived properties before handing them to the exec node.
    final RexNode multijoinCondition = getMultiJoinCondition();
    final List<List<int[]>> localInputUniqueKeys = getUniqueKeysForInputs();
    final List<FlinkJoinType> execJoinTypes = getExecJoinTypes();
    final List<InputProperty> inputProperties = createInputProperties();
    return new StreamExecMultiJoin(
            unwrapTableConfig(this),
            execJoinTypes,
            joinConditions,
            multijoinCondition,
            joinAttributeMap,
            localInputUniqueKeys,
            StateTtlHint.getStateTtlFromHintOnMultiRel(this.hints),
            inputProperties,
            FlinkTypeFactory.toLogicalRowType(getRowType()),
            getRelDetailedDescription());
}
/**
 * Builds the combined join condition: the conjunction of every non-null per-input
 * condition, the join filter, and (when present) the post-join filter.
 */
private RexNode createMultiJoinCondition() {
    final List<RexNode> conjunctions = new ArrayList<>();
    joinConditions.stream().filter(Objects::nonNull).forEach(conjunctions::add);
    conjunctions.add(joinFilter);
    if (postJoinFilter != null) {
        conjunctions.add(postJoinFilter);
    }
    return RexUtil.composeConjunction(getCluster().getRexBuilder(), conjunctions, true);
}
/**
 * Returns, per input, the unique keys (as field-index arrays) reported by the metadata
 * query, or an empty list for an input whose unique keys are unknown.
 *
 * <p>The result is cached; {@link #replaceInput} resets the cache.
 * NOTE(review): lazy initialization is unsynchronized — assumes single-threaded planning.
 */
public List<List<int[]>> getUniqueKeysForInputs() {
    if (inputUniqueKeys == null) {
        inputUniqueKeys =
                inputs.stream()
                        .map(
                                input -> {
                                    final Set<ImmutableBitSet> uniqueKeys =
                                            getUniqueKeys(input);
                                    if (uniqueKeys == null) {
                                        // Metadata could not determine any keys.
                                        return Collections.<int[]>emptyList();
                                    }
                                    return uniqueKeys.stream()
                                            .map(ImmutableBitSet::toArray)
                                            .collect(Collectors.toList());
                                })
                        .collect(Collectors.toUnmodifiableList());
    }
    return inputUniqueKeys;
}
/** Returns the join-key field indices for the given input, as computed by the key extractor. */
public int[] getJoinKeyIndices(int inputId) {
    return keyExtractor.getJoinKeyIndices(inputId);
}
/** Queries the metadata for an input's unique keys; {@code null} when they cannot be determined. */
private @Nullable Set<ImmutableBitSet> getUniqueKeys(RelNode input) {
    final FlinkRelMetadataQuery fmq =
            FlinkRelMetadataQuery.reuseOrCreate(input.getCluster().getMetadataQuery());
    return fmq.getUniqueKeys(input);
}
/**
 * Returns the combined join condition (lazily created and cached; {@link #replaceInput}
 * resets the cache). NOTE(review): unsynchronized lazy init — assumes single-threaded use.
 */
public RexNode getMultiJoinCondition() {
    if (multiJoinCondition == null) {
        multiJoinCondition = createMultiJoinCondition();
    }
    return multiJoinCondition;
}
/**
 * Converts the Calcite join types to their exec-layer equivalents. Only INNER and LEFT are
 * supported; any other type raises {@link UnsupportedOperationException}.
 */
private List<FlinkJoinType> getExecJoinTypes() {
    final List<FlinkJoinType> execJoinTypes = new ArrayList<>(joinTypes.size());
    for (final JoinRelType joinType : joinTypes) {
        switch (joinType) {
            case INNER:
                execJoinTypes.add(FlinkJoinType.INNER);
                break;
            case LEFT:
                execJoinTypes.add(FlinkJoinType.LEFT);
                break;
            default:
                throw new UnsupportedOperationException("Unsupported join type: " + joinType);
        }
    }
    return execJoinTypes;
}
/** Returns the per-input join types (exposes the internal list directly). */
public List<JoinRelType> getJoinTypes() {
    return joinTypes;
}
/**
* Returns the common join key field names as a comma-separated string. Uses the field names
* from the first input to map the common join key indices.
*
* @return comma-separated string of common join key field names, or empty string if no common
* join key
*/
private String getCommonJoinKeyFieldNames() {
    // Resolve the common join key indices against the first input's field names;
    // indices beyond the first input's field count are silently dropped.
    final List<String> fieldNames = inputs.get(0).getRowType().getFieldNames();
    final List<String> commonJoinKey = new ArrayList<>();
    for (final int index : keyExtractor.getCommonJoinKeyIndices(0)) {
        if (index < fieldNames.size()) {
            commonJoinKey.add(fieldNames.get(index));
        }
    }
    return commonJoinKey.isEmpty() ? "noCommonJoinKey" : String.join(", ", commonJoinKey);
}
/**
 * Formats a RexNode expression with field names for better readability in explain output.
 *
 * @param expression the expression to format; {@code null} yields the empty string
 * @param pw the RelWriter for determining format preferences
 * @return formatted expression string with field names
 */
private String formatExpressionWithFieldNames(final RexNode expression, final RelWriter pw) {
    if (expression == null) {
        return "";
    }
    // Field names of this node's row type are converted to a Scala list for the
    // Scala-implemented expression stringifier.
    return getExpressionString(
            expression,
            JavaScalaConversionUtil.toScala(getRowType().getFieldNames()).toList(),
            JavaScalaConversionUtil.toScala(Optional.empty()),
            RelExplainUtil.preferExpressionFormat(pw),
            RelExplainUtil.preferExpressionDetail(pw));
}
/**
 * Formats join conditions with field names for better readability in explain output.
 *
 * <p>The first entry is skipped — presumably the left-most input has no preceding join
 * condition (NOTE(review): confirm against how {@code joinConditions} is populated) —
 * and {@code null} conditions are omitted.
 *
 * @param pw the RelWriter for determining format preferences
 * @return formatted join conditions string with field names
 */
private String formatJoinConditionsWithFieldNames(final RelWriter pw) {
    return joinConditions.stream()
            .skip(1)
            .filter(Objects::nonNull)
            .map(condition -> formatExpressionWithFieldNames(condition, pw))
            .collect(Collectors.joining(", "));
}
/** Renders the join types as a comma-separated string, skipping the entry at index 0. */
private String formatJoinTypes() {
    final StringBuilder rendered = new StringBuilder();
    for (int i = 1; i < joinTypes.size(); i++) {
        if (rendered.length() > 0) {
            rendered.append(", ");
        }
        rendered.append(joinTypes.get(i).toString());
    }
    return rendered.toString();
}
/**
 * Renders each input's unique keys as field-name tuples, e.g. "(a, b), (c)"; inputs without
 * known unique keys are rendered as "noUniqueKey".
 */
private String formatInputUniqueKeysWithFieldNames() {
    final List<String> inputUniqueKeyStrings = new ArrayList<>();
    for (final RelNode input : inputs) {
        final Set<ImmutableBitSet> uniqueKeys = getUniqueKeys(input);
        if (uniqueKeys != null && !uniqueKeys.isEmpty()) {
            final List<String> fieldNames = input.getRowType().getFieldNames();
            final List<String> uniqueKeyStrings = new ArrayList<>();
            for (final ImmutableBitSet uniqueKey : uniqueKeys) {
                final List<String> keyFieldNames = new ArrayList<>();
                for (final int index : uniqueKey.toArray()) {
                    // Out-of-range indices are silently dropped.
                    if (index < fieldNames.size()) {
                        keyFieldNames.add(fieldNames.get(index));
                    }
                }
                if (!keyFieldNames.isEmpty()) {
                    uniqueKeyStrings.add("(" + String.join(", ", keyFieldNames) + ")");
                }
            }
            inputUniqueKeyStrings.add(String.join(", ", uniqueKeyStrings));
        } else {
            inputUniqueKeyStrings.add("noUniqueKey");
        }
    }
    return String.join(", ", inputUniqueKeyStrings);
}
/**
* This is mainly used in `FlinkChangelogModeInferenceProgram.SatisfyUpdateKindTraitVisitor`. If
* the unique key of input is a superset of the common join key, then we can ignore
* UPDATE_BEFORE. Otherwise, we can't ignore UPDATE_BEFORE.
*
* <p>For example, if the input schema is [id, name, cnt] with the unique key (id) and the
* common join key is (id, name) across joins, then an insert and update on the id:
*
* <p>+I(1001, Tim, 10) -U(1001, Tim, 10) +U(1001, Timo, 11)
*
* <p>If the UPDATE_BEFORE is ignored, the `+I(1001, Tim, 10)` record in join will never be
* retracted. Therefore, if we want to ignore UPDATE_BEFORE, the unique key must contain join
* key.
*
* <p>This is similar to {@link StreamPhysicalJoin#inputUniqueKeyContainsJoinKey(int)} but here
* we use the common join key, since the multi join operator partitions on the common join key.
*/
public boolean inputUniqueKeyContainsCommonJoinKey(int inputId) {
    final RelNode input = getInputs().get(inputId);
    final Set<ImmutableBitSet> inputUniqueKeysSet = getUniqueKeys(input);
    // Unknown or absent unique keys: containment cannot be proven, so answer conservatively.
    if (inputUniqueKeysSet == null || inputUniqueKeysSet.isEmpty()) {
        return false;
    }
    final int[] commonJoinKeyIndices = keyExtractor.getCommonJoinKeyIndices(inputId);
    // No common join key for this input: treat as "not contained".
    if (commonJoinKeyIndices.length == 0) {
        return false;
    }
    final ImmutableBitSet commonJoinKeys = ImmutableBitSet.of(commonJoinKeyIndices);
    // True if at least one unique key is a superset of the common join key.
    return inputUniqueKeysSet.stream()
            .anyMatch(uniqueKey -> uniqueKey.contains(commonJoinKeys));
}
/** Derives one {@link InputProperty} per input, in input order. */
private List<InputProperty> createInputProperties() {
    final int inputCount = inputs.size();
    final List<InputProperty> properties = new ArrayList<>(inputCount);
    for (int inputIndex = 0; inputIndex < inputCount; inputIndex++) {
        properties.add(createInputPropertyFromTrait(getInput(inputIndex), inputIndex));
    }
    return properties;
}
/**
 * Derives the required input property from the input's Flink distribution trait: hash
 * distributions map to hash-partitioned requirements (singleton when the key set is empty),
 * singleton maps to singleton, and anything else falls back to {@link InputProperty#DEFAULT}.
 */
private InputProperty createInputPropertyFromTrait(final RelNode input, final int inputIndex) {
    final FlinkRelDistribution distribution =
            input.getTraitSet().getTrait(FlinkRelDistributionTraitDef.INSTANCE());
    if (distribution == null) {
        // No distribution trait present: no particular requirement.
        return InputProperty.DEFAULT;
    }
    final InputProperty.RequiredDistribution requiredDistribution;
    switch (distribution.getType()) {
        case HASH_DISTRIBUTED:
            final int[] keys = distribution.getKeys().toIntArray();
            if (keys.length == 0) {
                // Hash with no keys degenerates to a singleton requirement.
                requiredDistribution = InputProperty.SINGLETON_DISTRIBUTION;
            } else {
                requiredDistribution = InputProperty.hashDistribution(keys);
            }
            break;
        case SINGLETON:
            requiredDistribution = InputProperty.SINGLETON_DISTRIBUTION;
            break;
        default:
            return InputProperty.DEFAULT;
    }
    // Priority mirrors the input's index — presumably to preserve input ordering
    // in the exec node; TODO confirm.
    return InputProperty.builder()
            .requiredDistribution(requiredDistribution)
            .damBehavior(InputProperty.DamBehavior.PIPELINED)
            .priority(inputIndex)
            .build();
}
}
| StreamPhysicalMultiJoin |
java | apache__flink | flink-metrics/flink-metrics-influxdb/src/test/java/org/apache/flink/metrics/influxdb/MetricMapperTest.java | {
"start": 1482,
"end": 4193
} | class ____ {
private static final String NAME = "a-metric-name";
private static final MeasurementInfo INFO = getMeasurementInfo(NAME);
private static final Instant TIMESTAMP = Instant.now();
@Test
void testMapGauge() {
    // Gauges of any payload type (Number, null, String, Long) map to a single "value" field
    // rendered via the payload's string form.
    verifyPoint(MetricMapper.map(INFO, TIMESTAMP, (Gauge<Number>) () -> 42), "value=42");
    verifyPoint(MetricMapper.map(INFO, TIMESTAMP, (Gauge<Number>) () -> null), "value=null");
    verifyPoint(
            MetricMapper.map(INFO, TIMESTAMP, (Gauge<String>) () -> "hello"), "value=hello");
    verifyPoint(MetricMapper.map(INFO, TIMESTAMP, (Gauge<Long>) () -> 42L), "value=42");
}
@Test
void testMapCounter() {
    // A counter maps to a single "count" field holding its current value.
    Counter counter = new SimpleCounter();
    counter.inc(42L);
    Point point = MetricMapper.map(INFO, TIMESTAMP, counter);
    verifyPoint(point, "count=42");
}
@Test
void testMapHistogram() {
    // TestHistogram supplies fixed statistics; a histogram maps to count/min/max/mean/stddev
    // plus the standard percentile fields.
    Histogram histogram = new TestHistogram();
    verifyPoint(
            MetricMapper.map(INFO, TIMESTAMP, histogram),
            "count=3",
            "max=6",
            "mean=4.0",
            "min=7",
            "p50=0.5",
            "p75=0.75",
            "p95=0.95",
            "p98=0.98",
            "p99=0.99",
            "p999=0.999",
            "stddev=5.0");
}
@Test
void testMapMeter() {
    // A meter maps to its event count and current rate.
    Meter meter = new TestMeter();
    verifyPoint(MetricMapper.map(INFO, TIMESTAMP, meter), "count=100", "rate=5.0");
}
/**
 * Asserts that the point's string form matches the expected name, timestamp, tags, and
 * fields.
 *
 * <p>Fix: the original passed {@code point.toString()} as a trailing varargs argument to
 * AssertJ's {@code isEqualTo(String template, Object... args)}. The template contains no
 * format specifiers, so the argument was dead — and a literal '%' in any dynamic part would
 * have broken {@code String.format}. Use the single-argument overload instead.
 */
private void verifyPoint(Point point, String... expectedFields) {
    // Most methods of Point are package private. We use toString() method to check that values
    // are as expected.
    // An alternative can be to call lineProtocol() method, which additionally escapes values
    // for InfluxDB format.
    assertThat(point.toString())
            .isEqualTo(
                    "Point [name="
                            + NAME
                            + ", time="
                            + TIMESTAMP.toEpochMilli()
                            + ", tags={tag-1=42, tag-2=green}"
                            + ", precision=MILLISECONDS"
                            + ", fields={"
                            + String.join(", ", expectedFields)
                            + "}"
                            + "]");
}
/** Builds a measurement fixture with the given name and the two fixed tags used by all tests. */
private static MeasurementInfo getMeasurementInfo(String name) {
    Map<String, String> tags = new HashMap<>();
    tags.put("tag-1", "42");
    tags.put("tag-2", "green");
    return new MeasurementInfo(name, tags);
}
}
| MetricMapperTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/issues/BeanRouteToDerivedClassTest.java | {
"start": 1179,
"end": 1821
} | class ____ extends ContextTestSupport {
private final DerivedClass derived = new DerivedClass();
@Override
public boolean isUseRouteBuilder() {
    // Routes are added manually inside the test method (via context.addRoutes),
    // so the automatic route builder is disabled.
    return false;
}
@Test
public void testDerivedClassCalled() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("bean:derived?method=process");
}
});
context.start();
template.sendBody("direct:start", "Hello World");
assertEquals("Hello World", derived.getAndClearBody(), "Derived | BeanRouteToDerivedClassTest |
java | google__guava | android/guava-tests/test/com/google/common/collect/MultimapsTest.java | {
"start": 3094,
"end": 22354
} | class ____ extends TestCase {
private static final Comparator<Integer> INT_COMPARATOR =
Ordering.<Integer>natural().reverse().nullsFirst();
@SuppressWarnings({"deprecation", "InlineMeInliner"}) // test of a deprecated method
public void testUnmodifiableListMultimapShortCircuit() {
ListMultimap<String, Integer> mod = ArrayListMultimap.create();
ListMultimap<String, Integer> unmod = Multimaps.unmodifiableListMultimap(mod);
assertNotSame(mod, unmod);
assertSame(unmod, Multimaps.unmodifiableListMultimap(unmod));
ImmutableListMultimap<String, Integer> immutable =
ImmutableListMultimap.of("a", 1, "b", 2, "a", 3);
assertSame(immutable, Multimaps.unmodifiableListMultimap(immutable));
assertSame(
immutable, Multimaps.unmodifiableListMultimap((ListMultimap<String, Integer>) immutable));
}
@SuppressWarnings({"deprecation", "InlineMeInliner"}) // test of a deprecated method
public void testUnmodifiableSetMultimapShortCircuit() {
    // Same short-circuit contract as the ListMultimap variant, for SetMultimap.
    SetMultimap<String, Integer> mod = HashMultimap.create();
    SetMultimap<String, Integer> unmod = Multimaps.unmodifiableSetMultimap(mod);
    assertNotSame(mod, unmod);
    assertSame(unmod, Multimaps.unmodifiableSetMultimap(unmod));
    ImmutableSetMultimap<String, Integer> immutable =
            ImmutableSetMultimap.of("a", 1, "b", 2, "a", 3);
    assertSame(immutable, Multimaps.unmodifiableSetMultimap(immutable));
    assertSame(
            immutable, Multimaps.unmodifiableSetMultimap((SetMultimap<String, Integer>) immutable));
}
@SuppressWarnings({"deprecation", "InlineMeInliner"}) // test of a deprecated method
public void testUnmodifiableMultimapShortCircuit() {
    // Same short-circuit contract, for the plain Multimap wrapper.
    Multimap<String, Integer> mod = HashMultimap.create();
    Multimap<String, Integer> unmod = Multimaps.unmodifiableMultimap(mod);
    assertNotSame(mod, unmod);
    assertSame(unmod, Multimaps.unmodifiableMultimap(unmod));
    ImmutableMultimap<String, Integer> immutable = ImmutableMultimap.of("a", 1, "b", 2, "a", 3);
    assertSame(immutable, Multimaps.unmodifiableMultimap(immutable));
    assertSame(immutable, Multimaps.unmodifiableMultimap((Multimap<String, Integer>) immutable));
}
@GwtIncompatible // slow (~10s)
public void testUnmodifiableArrayListMultimap() {
    // Exhaustive unmodifiability check for a duplicate-permitting, null-accepting multimap.
    checkUnmodifiableMultimap(
            ArrayListMultimap.<@Nullable String, @Nullable Integer>create(), true);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerializingUnmodifiableArrayListMultimap() {
    // The unmodifiable wrapper must survive a serialization round trip.
    Multimap<String, Integer> unmodifiable =
            prepareUnmodifiableTests(ArrayListMultimap.<String, Integer>create(), true, null, null);
    SerializableTester.reserializeAndAssert(unmodifiable);
}
public void testUnmodifiableArrayListMultimapRandomAccess() {
    // ArrayList-backed value views must advertise RandomAccess, even for absent keys.
    ListMultimap<String, Integer> delegate = ArrayListMultimap.create();
    delegate.put("foo", 1);
    delegate.put("foo", 3);
    ListMultimap<String, Integer> multimap = Multimaps.unmodifiableListMultimap(delegate);
    assertTrue(multimap.get("foo") instanceof RandomAccess);
    assertTrue(multimap.get("bar") instanceof RandomAccess);
}
public void testUnmodifiableLinkedListMultimapRandomAccess() {
    // LinkedList-backed value views must NOT advertise RandomAccess.
    ListMultimap<String, Integer> delegate = LinkedListMultimap.create();
    delegate.put("foo", 1);
    delegate.put("foo", 3);
    ListMultimap<String, Integer> multimap = Multimaps.unmodifiableListMultimap(delegate);
    assertFalse(multimap.get("foo") instanceof RandomAccess);
    assertFalse(multimap.get("bar") instanceof RandomAccess);
}
@GwtIncompatible // slow (~10s)
public void testUnmodifiableHashMultimap() {
    // Set-based multimap: duplicates collapse, so permitsDuplicates=false.
    checkUnmodifiableMultimap(HashMultimap.<@Nullable String, @Nullable Integer>create(), false);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerializingUnmodifiableHashMultimap() {
    // Serialization round trip of the unmodifiable HashMultimap wrapper.
    Multimap<String, Integer> unmodifiable =
            prepareUnmodifiableTests(HashMultimap.<String, Integer>create(), false, null, null);
    SerializableTester.reserializeAndAssert(unmodifiable);
}
@GwtIncompatible // slow (~10s)
public void testUnmodifiableTreeMultimap() {
    // TreeMultimap rejects nulls, so the harness substitutes "null"/42 for null key/value.
    checkUnmodifiableMultimap(TreeMultimap.<String, Integer>create(), false, "null", 42);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerializingUnmodifiableTreeMultimap() {
    // Serialization round trip with null-substitutes since TreeMultimap rejects nulls.
    Multimap<String, Integer> unmodifiable =
            prepareUnmodifiableTests(TreeMultimap.<String, Integer>create(), false, "null", 42);
    SerializableTester.reserializeAndAssert(unmodifiable);
}
@GwtIncompatible // slow (~10s)
@J2ktIncompatible // Synchronized
public void testUnmodifiableSynchronizedArrayListMultimap() {
    // Unmodifiable view stacked on top of the synchronized wrapper.
    checkUnmodifiableMultimap(
            synchronizedListMultimap(ArrayListMultimap.<@Nullable String, @Nullable Integer>create()),
            true);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerializingUnmodifiableSynchronizedArrayListMultimap() {
    // Serialization round trip of unmodifiable-over-synchronized ArrayListMultimap.
    Multimap<String, Integer> unmodifiable =
            prepareUnmodifiableTests(
                    synchronizedListMultimap(ArrayListMultimap.<String, Integer>create()),
                    true,
                    null,
                    null);
    SerializableTester.reserializeAndAssert(unmodifiable);
}
@GwtIncompatible // slow (~10s)
@J2ktIncompatible // Synchronized
public void testUnmodifiableSynchronizedHashMultimap() {
    // Unmodifiable view stacked on top of the synchronized SetMultimap wrapper.
    checkUnmodifiableMultimap(
            synchronizedSetMultimap(HashMultimap.<@Nullable String, @Nullable Integer>create()), false);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerializingUnmodifiableSynchronizedHashMultimap() {
    // Serialization round trip of unmodifiable-over-synchronized HashMultimap.
    Multimap<String, Integer> unmodifiable =
            prepareUnmodifiableTests(
                    synchronizedSetMultimap(HashMultimap.<String, Integer>create()), false, null, null);
    SerializableTester.reserializeAndAssert(unmodifiable);
}
@GwtIncompatible // slow (~10s)
@J2ktIncompatible // Synchronized
public void testUnmodifiableSynchronizedTreeMultimap() {
    // Also verifies the custom value comparator survives the synchronized wrapping.
    TreeMultimap<String, Integer> delegate =
            TreeMultimap.create(Ordering.<String>natural(), INT_COMPARATOR);
    SortedSetMultimap<String, Integer> multimap = synchronizedSortedSetMultimap(delegate);
    checkUnmodifiableMultimap(multimap, false, "null", 42);
    assertSame(INT_COMPARATOR, multimap.valueComparator());
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerializingUnmodifiableSynchronizedTreeMultimap() {
    // Serialization round trip; the value comparator must still be the same instance after.
    TreeMultimap<String, Integer> delegate =
            TreeMultimap.create(Ordering.<String>natural(), INT_COMPARATOR);
    SortedSetMultimap<String, Integer> multimap = synchronizedSortedSetMultimap(delegate);
    Multimap<String, Integer> unmodifiable = prepareUnmodifiableTests(multimap, false, "null", 42);
    SerializableTester.reserializeAndAssert(unmodifiable);
    assertSame(INT_COMPARATOR, multimap.valueComparator());
}
public void testUnmodifiableMultimapIsView() {
    // The unmodifiable wrapper is a live view: changes to the backing multimap
    // are visible through it, and the two stay equal.
    Multimap<String, Integer> backing = HashMultimap.create();
    Multimap<String, Integer> view = Multimaps.unmodifiableMultimap(backing);
    assertEquals(backing, view);
    backing.put("foo", 1);
    assertTrue(view.containsEntry("foo", 1));
    assertEquals(backing, view);
}
@SuppressWarnings("unchecked")
public void testUnmodifiableMultimapEntries() {
    // Entries obtained from the unmodifiable view — whether via iterator or toArray —
    // must reject setValue, and nefarious (mutating) entries must not probe the map.
    Multimap<String, Integer> mod = HashMultimap.create();
    Multimap<String, Integer> unmod = Multimaps.unmodifiableMultimap(mod);
    mod.put("foo", 1);
    Entry<String, Integer> fromIterator = unmod.entries().iterator().next();
    assertThrows(UnsupportedOperationException.class, () -> fromIterator.setValue(2));
    Entry<String, Integer> fromToArray = (Entry<String, Integer>) unmod.entries().toArray()[0];
    assertThrows(UnsupportedOperationException.class, () -> fromToArray.setValue(2));
    Entry<String, Integer>[] array = (Entry<String, Integer>[]) new Entry<?, ?>[2];
    assertSame(array, unmod.entries().toArray(array));
    assertThrows(UnsupportedOperationException.class, () -> array[0].setValue(2));
    assertFalse(unmod.entries().contains(nefariousMapEntry("pwnd", 2)));
    assertFalse(unmod.keys().contains("pwnd"));
}
/**
 * The supplied multimap will be mutated and an unmodifiable instance used in its stead. The
 * multimap must support null keys and values.
 */
private static void checkUnmodifiableMultimap(
        Multimap<@Nullable String, @Nullable Integer> multimap, boolean permitsDuplicates) {
    // No null-substitutes needed since the multimap accepts nulls directly.
    checkUnmodifiableMultimap(multimap, permitsDuplicates, null, null);
}
/**
 * The supplied multimap will be mutated and an unmodifiable instance used in its stead. If the
 * multimap does not support null keys or values, alternatives may be specified for tests
 * involving nulls.
 */
private static void checkUnmodifiableMultimap(
        Multimap<@Nullable String, @Nullable Integer> multimap,
        boolean permitsDuplicates,
        @Nullable String nullKey,
        @Nullable Integer nullValue) {
    Multimap<String, Integer> unmodifiable =
            prepareUnmodifiableTests(multimap, permitsDuplicates, nullKey, nullValue);
    // Every mutator on the view and on each derived view must throw.
    UnmodifiableCollectionTests.assertMultimapIsUnmodifiable(unmodifiable, "test", 123);
    assertUnmodifiableIterableInTandem(unmodifiable.keys(), multimap.keys());
    assertUnmodifiableIterableInTandem(unmodifiable.keySet(), multimap.keySet());
    assertUnmodifiableIterableInTandem(unmodifiable.entries(), multimap.entries());
    assertUnmodifiableIterableInTandem(
            unmodifiable.asMap().entrySet(), multimap.asMap().entrySet());
    // The view must be value-equal to the delegate in every observable way.
    assertEquals(multimap.toString(), unmodifiable.toString());
    assertEquals(multimap.hashCode(), unmodifiable.hashCode());
    assertEquals(multimap, unmodifiable);
    assertThat(unmodifiable.asMap().get("bar")).containsExactly(5, -1);
    assertThat(unmodifiable.asMap().get("missing")).isNull();
    assertFalse(unmodifiable.entries() instanceof Serializable);
}
/** Prepares the multimap for unmodifiable tests, returning an unmodifiable view of the map. */
private static Multimap<@Nullable String, @Nullable Integer> prepareUnmodifiableTests(
        Multimap<@Nullable String, @Nullable Integer> multimap,
        boolean permitsDuplicates,
        @Nullable String nullKey,
        @Nullable Integer nullValue) {
    // Fixture: 9 puts including a duplicate ("foo", 2) and null-key/null-value entries
    // (or their substitutes for null-hostile implementations).
    multimap.clear();
    multimap.put("foo", 1);
    multimap.put("foo", 2);
    multimap.put("foo", 3);
    multimap.put("bar", 5);
    multimap.put("bar", -1);
    multimap.put(nullKey, nullValue);
    multimap.put("foo", nullValue);
    multimap.put(nullKey, 5);
    multimap.put("foo", 2);
    // List-based multimaps keep the duplicate put; set-based ones collapse it.
    if (permitsDuplicates) {
        assertEquals(9, multimap.size());
    } else {
        assertEquals(8, multimap.size());
    }
    // Pick the most specific unmodifiable wrapper for the runtime type.
    Multimap<@Nullable String, @Nullable Integer> unmodifiable;
    if (multimap instanceof SortedSetMultimap) {
        unmodifiable =
                Multimaps.unmodifiableSortedSetMultimap(
                        (SortedSetMultimap<@Nullable String, @Nullable Integer>) multimap);
    } else if (multimap instanceof SetMultimap) {
        unmodifiable =
                Multimaps.unmodifiableSetMultimap(
                        (SetMultimap<@Nullable String, @Nullable Integer>) multimap);
    } else if (multimap instanceof ListMultimap) {
        unmodifiable =
                Multimaps.unmodifiableListMultimap(
                        (ListMultimap<@Nullable String, @Nullable Integer>) multimap);
    } else {
        unmodifiable = Multimaps.unmodifiableMultimap(multimap);
    }
    return unmodifiable;
}
/** Asserts the view's iterator is unmodifiable and yields the same elements, in order. */
private static <T extends @Nullable Object> void assertUnmodifiableIterableInTandem(
        Iterable<T> unmodifiable, Iterable<T> modifiable) {
    UnmodifiableCollectionTests.assertIteratorIsUnmodifiable(unmodifiable.iterator());
    UnmodifiableCollectionTests.assertIteratorsInOrder(
            unmodifiable.iterator(), modifiable.iterator());
}
public void testInvertFrom() {
    // invertFrom copies (value -> key) pairs into the supplied destination and returns it.
    ImmutableMultimap<Integer, String> empty = ImmutableMultimap.of();
    // typical usage example - sad that ArrayListMultimap.create() won't work
    Multimap<String, Integer> multimap =
            Multimaps.invertFrom(empty, ArrayListMultimap.<String, Integer>create());
    assertTrue(multimap.isEmpty());
    ImmutableMultimap<Integer, String> single =
            new ImmutableMultimap.Builder<Integer, String>().put(1, "one").put(2, "two").build();
    // copy into existing multimap
    assertSame(multimap, Multimaps.invertFrom(single, multimap));
    ImmutableMultimap<String, Integer> expected =
            new ImmutableMultimap.Builder<String, Integer>().put("one", 1).put("two", 2).build();
    assertEquals(expected, multimap);
}
public void testAsMap_multimap() {
    // The static Multimaps.asMap overload must return the same instance as the
    // instance method's asMap() view.
    Multimap<String, Integer> multimap =
            Multimaps.newMultimap(new HashMap<String, Collection<Integer>>(), new QueueSupplier());
    Map<String, Collection<Integer>> map = Multimaps.asMap(multimap);
    assertSame(multimap.asMap(), map);
}
public void testAsMap_listMultimap() {
    // Same identity contract for the ListMultimap overload (values typed as List).
    ListMultimap<String, Integer> listMultimap = ArrayListMultimap.create();
    Map<String, List<Integer>> map = Multimaps.asMap(listMultimap);
    assertSame(listMultimap.asMap(), map);
}
public void testAsMap_setMultimap() {
    // Same identity contract for the SetMultimap overload (values typed as Set).
    SetMultimap<String, Integer> setMultimap = LinkedHashMultimap.create();
    Map<String, Set<Integer>> map = Multimaps.asMap(setMultimap);
    assertSame(setMultimap.asMap(), map);
}
public void testAsMap_sortedSetMultimap() {
    // Same identity contract for the SortedSetMultimap overload (values typed as SortedSet).
    SortedSetMultimap<String, Integer> sortedSetMultimap = TreeMultimap.create();
    Map<String, SortedSet<Integer>> map = Multimaps.asMap(sortedSetMultimap);
    assertSame(sortedSetMultimap.asMap(), map);
}
public void testForMap() {
    // Multimaps.forMap wraps a Map as a SetMultimap view: equal to an equivalent
    // multimap, read-through, removal-through, but rejects all additive mutations.
    Map<String, Integer> map = new HashMap<>();
    map.put("foo", 1);
    map.put("bar", 2);
    Multimap<String, Integer> multimap = HashMultimap.create();
    multimap.put("foo", 1);
    multimap.put("bar", 2);
    Multimap<String, Integer> multimapView = Multimaps.forMap(map);
    new EqualsTester().addEqualityGroup(multimap, multimapView).addEqualityGroup(map).testEquals();
    Multimap<String, Integer> multimap2 = HashMultimap.create();
    multimap2.put("foo", 1);
    assertFalse(multimapView.equals(multimap2));
    multimap2.put("bar", 1);
    assertFalse(multimapView.equals(multimap2));
    ListMultimap<String, Integer> listMultimap =
            new ImmutableListMultimap.Builder<String, Integer>().put("foo", 1).put("bar", 2).build();
    assertFalse("SetMultimap equals ListMultimap", multimapView.equals(listMultimap));
    assertEquals(multimap.hashCode(), multimapView.hashCode());
    assertEquals(multimap.size(), multimapView.size());
    assertTrue(multimapView.containsKey("foo"));
    assertTrue(multimapView.containsValue(1));
    assertTrue(multimapView.containsEntry("bar", 2));
    assertEquals(singleton(1), multimapView.get("foo"));
    assertEquals(singleton(2), multimapView.get("bar"));
    // All additive operations are unsupported on the view.
    assertThrows(UnsupportedOperationException.class, () -> multimapView.put("baz", 3));
    assertThrows(
            UnsupportedOperationException.class, () -> multimapView.putAll("baz", singleton(3)));
    assertThrows(UnsupportedOperationException.class, () -> multimapView.putAll(multimap));
    assertThrows(
            UnsupportedOperationException.class,
            () -> multimapView.replaceValues("foo", Collections.<Integer>emptySet()));
    // Removals write through to the backing map.
    multimapView.remove("bar", 2);
    assertFalse(multimapView.containsKey("bar"));
    assertFalse(map.containsKey("bar"));
    assertEquals(map.keySet(), multimapView.keySet());
    assertEquals(map.keySet(), multimapView.keys().elementSet());
    assertThat(multimapView.keys()).contains("foo");
    assertThat(multimapView.values()).contains(1);
    assertThat(multimapView.entries()).contains(Maps.immutableEntry("foo", 1));
    assertThat(multimapView.asMap().entrySet())
            .contains(Maps.immutableEntry("foo", (Collection<Integer>) singleton(1)));
    // clear() also writes through to the backing map.
    multimapView.clear();
    assertFalse(multimapView.containsKey("foo"));
    assertFalse(map.containsKey("foo"));
    assertTrue(map.isEmpty());
    assertTrue(multimapView.isEmpty());
    multimap.clear();
    assertEquals(multimap.toString(), multimapView.toString());
    assertEquals(multimap.hashCode(), multimapView.hashCode());
    assertEquals(multimap.size(), multimapView.size());
    assertEquals(multimapView, ArrayListMultimap.create());
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testForMapSerialization() {
    // The forMap view must survive a serialization round trip.
    Map<String, Integer> map = new HashMap<>();
    map.put("foo", 1);
    map.put("bar", 2);
    Multimap<String, Integer> multimapView = Multimaps.forMap(map);
    SerializableTester.reserializeAndAssert(multimapView);
}
public void testForMapRemoveAll() {
    // removeAll on the forMap view: absent keys are a no-op yielding an empty set,
    // present keys report their single former value and shrink the view.
    Map<String, Integer> backing = new HashMap<>();
    backing.put("foo", 1);
    backing.put("bar", 2);
    backing.put("cow", 3);
    Multimap<String, Integer> view = Multimaps.forMap(backing);
    assertEquals(3, view.size());
    assertEquals(emptySet(), view.removeAll("dog"));
    assertEquals(3, view.size());
    assertTrue(view.containsKey("bar"));
    assertEquals(singleton(2), view.removeAll("bar"));
    assertEquals(2, view.size());
    assertFalse(view.containsKey("bar"));
}
public void testForMapAsMap() {
    // asMap() over the forMap view: membership and removal in the entry set must match
    // only exact (key, singleton-set) entries, and removals write through to the map.
    Map<String, Integer> map = new HashMap<>();
    map.put("foo", 1);
    map.put("bar", 2);
    Map<String, Collection<Integer>> asMap = Multimaps.forMap(map).asMap();
    assertEquals(singleton(1), asMap.get("foo"));
    assertThat(asMap.get("cow")).isNull();
    assertTrue(asMap.containsKey("foo"));
    assertFalse(asMap.containsKey("cow"));
    Set<Entry<String, Collection<Integer>>> entries = asMap.entrySet();
    // Non-entry objects, list-valued entries, and wrong-valued entries never match.
    assertFalse(entries.contains((Object) 4.5));
    assertFalse(entries.remove((Object) 4.5));
    assertFalse(entries.contains(Maps.immutableEntry("foo", singletonList(1))));
    assertFalse(entries.remove(Maps.immutableEntry("foo", singletonList(1))));
    assertFalse(entries.contains(Maps.immutableEntry("foo", new LinkedHashSet<>(asList(1, 2)))));
    assertFalse(entries.remove(Maps.immutableEntry("foo", new LinkedHashSet<>(asList(1, 2)))));
    assertFalse(entries.contains(Maps.immutableEntry("foo", singleton(2))));
    assertFalse(entries.remove(Maps.immutableEntry("foo", singleton(2))));
    assertTrue(map.containsKey("foo"));
    assertTrue(entries.contains(Maps.immutableEntry("foo", singleton(1))));
    assertTrue(entries.remove(Maps.immutableEntry("foo", singleton(1))));
    assertFalse(map.containsKey("foo"));
}
public void testForMapGetIteration() {
    // IteratorTester drives get("foo")'s iterator through all modification sequences;
    // a fresh view is created per run via newTargetIterator().
    IteratorTester<Integer> tester =
            new IteratorTester<Integer>(
                    4, MODIFIABLE, newHashSet(1), IteratorTester.KnownOrder.KNOWN_ORDER) {
                private @Nullable Multimap<String, Integer> multimap;
                @Override
                protected Iterator<Integer> newTargetIterator() {
                    Map<String, Integer> map = new HashMap<>();
                    map.put("foo", 1);
                    map.put("bar", 2);
                    multimap = Multimaps.forMap(map);
                    return multimap.get("foo").iterator();
                }
                @Override
                protected void verify(List<Integer> elements) {
                    // After each sequence, the view must reflect exactly the surviving elements.
                    assertEquals(new HashSet<>(elements), multimap.get("foo"));
                }
            };
    tester.test();
}
private | MultimapsTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/HttpUtil.java | {
"start": 1672,
"end": 4563
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(HttpUtil.class);
private static final Base64 BASE_64_CODEC = new Base64(0);
/** Utility class; instantiation (including by subclasses) is forbidden. */
protected HttpUtil() {
    // prevents calls from subclass
    throw new UnsupportedOperationException();
}
/**
 * Generate SPNEGO challenge request token.
 *
 * <p>Runs the GSS-API context initialization as the current logged-in user and returns the
 * initial security token, Base64-encoded, for use in a {@code Negotiate} Authorization header.
 *
 * @param server - hostname to contact
 * @return Base64-encoded initial SPNEGO token (US-ASCII)
 * @throws IOException if the current user's credentials cannot be obtained
 * @throws InterruptedException if the privileged action is interrupted
 */
public static String generateToken(String server) throws
    IOException, InterruptedException {
    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    LOG.debug("The user credential is {}", currentUser);
    String challenge = currentUser
        .doAs(new PrivilegedExceptionAction<String>() {
            @Override
            public String run() throws Exception {
                try {
                    GSSManager manager = GSSManager.getInstance();
                    // GSS name for server
                    GSSName serverName = manager.createName("HTTP@" + server,
                        GSSName.NT_HOSTBASED_SERVICE);
                    // Create a GSSContext for authentication with the service.
                    // We're passing client credentials as null since we want them to
                    // be read from the Subject.
                    // We're passing Oid as null to use the default.
                    GSSContext gssContext = manager.createContext(
                        serverName.canonicalize(null), null, null,
                        GSSContext.DEFAULT_LIFETIME);
                    gssContext.requestMutualAuth(true);
                    gssContext.requestCredDeleg(true);
                    // Establish context
                    byte[] inToken = new byte[0];
                    byte[] outToken = gssContext.initSecContext(inToken, 0,
                        inToken.length);
                    gssContext.dispose();
                    // Base64 encoded and stringified token for server
                    LOG.debug("Got valid challenge for host {}", serverName);
                    return new String(BASE_64_CODEC.encode(outToken),
                        StandardCharsets.US_ASCII);
                } catch (GSSException e) {
                    LOG.error("Error: ", e);
                    throw new AuthenticationException(e);
                }
            }
        });
    return challenge;
}
public static Invocation.Builder connect(String url) throws URISyntaxException,
IOException, InterruptedException {
boolean useKerberos = UserGroupInformation.isSecurityEnabled();
URI resource = new URI(url);
Client client = ClientBuilder.newClient();
Invocation.Builder builder = client
.target(url).request(MediaType.APPLICATION_JSON);
if (useKerberos) {
String challenge = generateToken(resource.getHost());
builder.header(HttpHeaders.AUTHORIZATION, "Negotiate " +
challenge);
LOG.debug("Authorization: Negotiate {}", challenge);
}
return builder;
}
}
| HttpUtil |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/deepstubs/DeepStubsWronglyReportsSerializationProblemsTest.java | {
"start": 807,
"end": 990
} | class ____ {
public ToBeDeepStubbed() {}
public NotSerializableShouldBeMocked getSomething() {
return null;
}
}
public static | ToBeDeepStubbed |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/uri/UriTemplateParser.java | {
"start": 11637,
"end": 11870
} | interface ____ {
/**
* Visit parts using a visitor.
*
* @param visitor The visitor
*/
void visit(PartVisitor visitor);
}
/**
* The parts visitor.
*/
public | Part |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultPathMatcherFactory.java | {
"start": 1582,
"end": 2800
} | class ____ implements PathMatcherFactory {
@Nonnull
@Override
public PathMatcher createPathMatcher(
@Nonnull Path baseDirectory,
Collection<String> includes,
Collection<String> excludes,
boolean useDefaultExcludes) {
requireNonNull(baseDirectory, "baseDirectory cannot be null");
return PathSelector.of(baseDirectory, includes, excludes, useDefaultExcludes);
}
@Nonnull
@Override
public PathMatcher createExcludeOnlyMatcher(
@Nonnull Path baseDirectory, Collection<String> excludes, boolean useDefaultExcludes) {
return createPathMatcher(baseDirectory, null, excludes, useDefaultExcludes);
}
@Nonnull
@Override
public PathMatcher deriveDirectoryMatcher(@Nonnull PathMatcher fileMatcher) {
if (Objects.requireNonNull(fileMatcher) instanceof PathSelector selector) {
if (selector.canFilterDirectories()) {
return selector::couldHoldSelected;
}
}
return PathSelector.INCLUDES_ALL;
}
@Nonnull
@Override
public PathMatcher includesAll() {
return PathSelector.INCLUDES_ALL;
}
}
| DefaultPathMatcherFactory |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builder/lifecycle/Item.java | {
"start": 239,
"end": 505
} | class ____ {
private final String name;
Item(Builder builder) {
this.name = builder.name;
}
public String getName() {
return name;
}
public static Builder builder() {
return new Builder();
}
public static | Item |
java | quarkusio__quarkus | devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/examples/funqy-google-cloud-functions-example/java/src/main/java/org/acme/funqygooglecloudfunctions/GreetingFunctions.java | {
"start": 265,
"end": 1193
} | class ____ {
@Inject
GreetingService service;
@Funq
public void helloPubSubWorld(PubsubMessage pubSubEvent) {
String message = service.hello("world");
System.out.println(pubSubEvent.messageId + " - " + message);
}
@Funq
public void helloGCSWorld(StorageEvent storageEvent) {
String message = service.hello("world");
System.out.println(storageEvent.name + " - " + message);
}
@Funq
public void helloCloudEvent(CloudEvent cloudEvent) {
System.out.println("Receive event Id: " + cloudEvent.getId());
System.out.println("Receive event Subject: " + cloudEvent.getSubject());
System.out.println("Receive event Type: " + cloudEvent.getType());
System.out.println("Receive event Data: " + new String(cloudEvent.getData().toBytes()));
System.out.println("Be polite, say " + service.hello("world"));
}
}
| GreetingFunctions |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/criteria/JpaTreatedPath.java | {
"start": 238,
"end": 341
} | interface ____<T,S extends T> extends JpaPath<S> {
ManagedDomainType<S> getTreatTarget();
}
| JpaTreatedPath |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java | {
"start": 2956,
"end": 3912
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TimelineReaderWebServices.class);
@Context
private ServletContext ctxt;
private static final String QUERY_STRING_SEP = "?";
private static final String RANGE_DELIMITER = "-";
private static final String DATE_PATTERN = "yyyyMMdd";
private static final TimelineReaderMetrics METRICS =
TimelineReaderMetrics.getInstance();
@VisibleForTesting
static final ThreadLocal<DateFormat> DATE_FORMAT =
new ThreadLocal<DateFormat>() {
@Override
protected DateFormat initialValue() {
SimpleDateFormat format =
new SimpleDateFormat(DATE_PATTERN, Locale.ENGLISH);
format.setTimeZone(TimeZone.getTimeZone("GMT"));
format.setLenient(false);
return format;
}
};
private void init(HttpServletResponse response) {
response.setContentType(null);
}
private static final | TimelineReaderWebServices |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithMockEndpointsHavingParameterTest.java | {
"start": 1148,
"end": 3762
} | class ____ extends ContextTestSupport {
@Test
public void testNoAdvised() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:foo").expectedBodiesReceived("Bye World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testAdvisedMockEndpoints() throws Exception {
// advice the first route using the inlined AdviceWith route builder
// which has extended capabilities than the regular route builder
AdviceWith.adviceWith(context.getRouteDefinitions().get(1), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
// mock all endpoints (will mock in all routes)
mockEndpoints();
}
});
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:foo").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:direct:start").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:log:start").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:seda:foo").expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
// additional test to ensure correct endpoints in registry
assertNotNull(context.hasEndpoint("direct:start"));
assertNotNull(context.hasEndpoint("seda:foo"));
assertNotNull(context.hasEndpoint("log:foo?showHeaders=false"));
assertNotNull(context.hasEndpoint("log:start?showAll=true"));
assertNotNull(context.hasEndpoint("mock:result"));
// all the endpoints was mocked
assertNotNull(context.hasEndpoint("mock:direct:start"));
assertNotNull(context.hasEndpoint("mock:seda:foo"));
assertNotNull(context.hasEndpoint("mock:log:start"));
assertNotNull(context.hasEndpoint("mock:log:foo"));
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("seda:foo?size=20").transform(constant("Bye World")).log("We transformed ${body}")
.to("log:foo?showHeaders=false").to("mock:foo");
from("direct:start").to("seda:foo").to("log:start?showAll=true").to("mock:result");
}
};
}
}
| AdviceWithMockEndpointsHavingParameterTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AMRMTokenIdentifier.java | {
"start": 4710,
"end": 5276
} | class ____ extends Token.TrivialRenewer {
@Override
protected Text getKind() {
return KIND_NAME;
}
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}
| Renewer |
java | micronaut-projects__micronaut-core | inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/builder/TestBuildMe9.java | {
"start": 714,
"end": 1319
} | class ____ {
private String name;
private int age;
protected Builder() {
}
public Builder name(String name) {
this.name = name;
return this;
}
public Builder name(Optional<String> name) {
this.name = name.orElse(null);
return this;
}
public Builder age(int age) {
this.age = age;
return this;
}
public TestBuildMe9 build() {
return new TestBuildMe9(
name,
age
);
}
}
}
| Builder |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/KalmanFilter1d.java | {
"start": 783,
"end": 4206
} | class ____ {
private static final Logger logger = LogManager.getLogger(KalmanFilter1d.class);
private final String name;
private final double smoothingFactor;
private final boolean autodetectDynamicsChange;
private double value;
private double variance;
private boolean dynamicsChangedLastTime;
KalmanFilter1d(String name, double smoothingFactor, boolean autodetectDynamicsChange) {
this.name = name;
this.smoothingFactor = smoothingFactor;
this.autodetectDynamicsChange = autodetectDynamicsChange;
this.value = Double.MAX_VALUE;
this.variance = Double.MAX_VALUE;
this.dynamicsChangedLastTime = false;
}
/**
* Adds a measurement (value, variance) to the estimator.
* dynamicChangedExternal indicates whether the underlying possibly changed before this measurement.
*/
void add(double value, double variance, boolean dynamicChangedExternal) {
boolean dynamicChanged;
if (hasValue() == false) {
dynamicChanged = true;
this.value = value;
this.variance = variance;
} else {
double processVariance = variance / smoothingFactor;
dynamicChanged = dynamicChangedExternal || detectDynamicsChange(value, variance);
if (dynamicChanged || dynamicsChangedLastTime) {
// If we know we likely had a change in the quantity we're estimating or the prediction
// is 10 stddev off, we inject extra noise in the dynamics for this step.
processVariance = Math.pow(value, 2);
}
double gain = (this.variance + processVariance) / (this.variance + processVariance + variance);
this.value += gain * (value - this.value);
this.variance = (1 - gain) * (this.variance + processVariance);
}
dynamicsChangedLastTime = dynamicChanged;
logger.debug(
() -> Strings.format(
"[%s] measurement %.3f ± %.3f: estimate %.3f ± %.3f (dynamic changed: %s).",
name,
value,
Math.sqrt(variance),
this.value,
Math.sqrt(this.variance),
dynamicChanged
)
);
}
/**
* Returns whether the estimator has received data and contains a value.
*/
boolean hasValue() {
return this.value < Double.MAX_VALUE && this.variance < Double.MAX_VALUE;
}
/**
* Returns the estimate of the mean value.
*/
double estimate() {
return value;
}
/**
* Returns the stderr of the estimate.
*/
double error() {
return Math.sqrt(this.variance);
}
/**
* Returns the lowerbound of the 1 stddev confidence interval of the estimate.
*/
double lower() {
return value - error();
}
/**
* Returns the upperbound of the 1 stddev confidence interval of the estimate.
*/
double upper() {
return value + error();
}
/**
* Returns whether (value, variance) is very unlikely, indicating that
* the underlying dynamics have changed.
*/
private boolean detectDynamicsChange(double value, double variance) {
return hasValue() && autodetectDynamicsChange && Math.pow(Math.abs(value - this.value), 2) / (variance + this.variance) > 100.0;
}
}
| KalmanFilter1d |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/resource/transaction/spi/TransactionCoordinator.java | {
"start": 3223,
"end": 4219
} | interface ____ {
/**
* Begin the physical transaction
*/
void begin();
/**
* Commit the physical transaction
*/
void commit();
/**
* Rollback the physical transaction
*/
void rollback();
TransactionStatus getStatus();
void markRollbackOnly();
default boolean isActive() {
final TransactionStatus status = getStatus();
return status == ACTIVE || status == MARKED_ROLLBACK;
}
default boolean isActiveAndNoMarkedForRollback() {
return getStatus() == ACTIVE;
}
// todo : org.hibernate.Transaction will need access to register local Synchronizations.
// depending on how we integrate TransactionCoordinator/TransactionDriverControl with
// org.hibernate.Transaction that might be best done by:
// 1) exposing registerSynchronization here (if the Transaction is just passed this)
// 2) using the exposed TransactionCoordinator#getLocalSynchronizations (if the Transaction is passed the TransactionCoordinator)
}
}
| TransactionDriver |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidLinkTest.java | {
"start": 1520,
"end": 1721
} | interface ____ {
/** {@link https://foo/bar/baz} */
void foo();
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerStartCommand.java | {
"start": 1155,
"end": 1922
} | class ____ {
private DockerStartCommand dockerStartCommand;
private static final String CONTAINER_NAME = "foo";
@BeforeEach
public void setUp() {
dockerStartCommand = new DockerStartCommand(CONTAINER_NAME);
}
@Test
public void testGetCommandOption() {
assertEquals("start", dockerStartCommand.getCommandOption());
}
@Test
public void testGetCommandWithArguments() {
assertEquals("start", StringUtils.join(",",
dockerStartCommand.getDockerCommandWithArguments()
.get("docker-command")));
assertEquals("foo", StringUtils.join(",",
dockerStartCommand.getDockerCommandWithArguments().get("name")));
assertEquals(2, dockerStartCommand.getDockerCommandWithArguments().size());
}
} | TestDockerStartCommand |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/query/SqlResultSetMappingDescriptor.java | {
"start": 8616,
"end": 11565
} | class ____ implements ResultDescriptor {
private final NavigablePath navigablePath;
private final String entityName;
private final String discriminatorColumn;
private final LockModeType lockMode;
private final Map<String, AttributeFetchDescriptor> explicitFetchMappings;
public EntityResultDescriptor(EntityResult entityResult) {
this.entityName = entityResult.entityClass().getName();
this.navigablePath = new NavigablePath( entityName );
this.discriminatorColumn = entityResult.discriminatorColumn();
this.lockMode = entityResult.lockMode();
this.explicitFetchMappings = extractFetchMappings( navigablePath, entityResult );
}
private static Map<String, AttributeFetchDescriptor> extractFetchMappings(
NavigablePath navigablePath,
EntityResult entityResult) {
final FieldResult[] fields = entityResult.fields();
final Map<String, AttributeFetchDescriptor> explicitFetchMappings = mapOfSize( fields.length );
for ( int i = 0; i < fields.length; i++ ) {
final FieldResult fieldResult = fields[i];
final String fieldName = fieldResult.name();
final AttributeFetchDescriptor existing = explicitFetchMappings.get( fieldName );
if ( existing != null ) {
existing.addColumn( fieldResult );
}
else {
explicitFetchMappings.put(
fieldName,
AttributeFetchDescriptor.from( navigablePath, navigablePath.getFullPath(), fieldResult )
);
}
}
return explicitFetchMappings;
}
@Override
public ResultMemento resolve(ResultSetMappingResolutionContext resolutionContext) {
final EntityMappingType entityDescriptor =
resolutionContext.getMappingMetamodel().getEntityDescriptor( entityName );
final FetchMementoBasic discriminatorMemento = resolveDiscriminatorMemento(
entityDescriptor,
discriminatorColumn,
navigablePath
);
final Map<String, FetchMemento> fetchMementos = new HashMap<>();
explicitFetchMappings.forEach(
(relativePath, fetchDescriptor) -> fetchMementos.put(
relativePath,
fetchDescriptor.resolve( resolutionContext )
)
);
return new ResultMementoEntityJpa(
entityDescriptor,
lockMode == LockModeType.OPTIMISTIC
? LockMode.NONE
: LockMode.fromJpaLockMode( lockMode ),
discriminatorMemento,
fetchMementos
);
}
private static FetchMementoBasic resolveDiscriminatorMemento(
EntityMappingType entityMapping,
String discriminatorColumn,
NavigablePath entityPath) {
final EntityDiscriminatorMapping discriminatorMapping = entityMapping.getDiscriminatorMapping();
if ( discriminatorMapping == null || discriminatorColumn == null || !entityMapping.hasSubclasses() ) {
return null;
}
return new FetchMementoBasicStandard(
entityPath.append( EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME ),
discriminatorMapping,
discriminatorColumn
);
}
}
private static | EntityResultDescriptor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRouterCLI.java | {
"start": 2849,
"end": 15780
} | class ____ {
private ResourceManagerAdministrationProtocol admin;
private RouterCLI rmAdminCLI;
private final static int SUBCLUSTER_NUM = 4;
@BeforeEach
public void setup() throws Exception {
admin = mock(ResourceManagerAdministrationProtocol.class);
when(admin.deregisterSubCluster(any(DeregisterSubClusterRequest.class)))
.thenAnswer((Answer<DeregisterSubClusterResponse>) invocationOnMock -> {
// Step1. parse subClusterId.
Object obj = invocationOnMock.getArgument(0);
DeregisterSubClusterRequest request = (DeregisterSubClusterRequest) obj;
String subClusterId = request.getSubClusterId();
if (StringUtils.isNotBlank(subClusterId)) {
return generateSubClusterDataBySCId(subClusterId);
} else {
return generateAllSubClusterData();
}
});
when(admin.saveFederationQueuePolicy(any(SaveFederationQueuePolicyRequest.class)))
.thenAnswer((Answer<SaveFederationQueuePolicyResponse>) invocationOnMock -> {
// Step1. parse subClusterId.
Object obj = invocationOnMock.getArgument(0);
SaveFederationQueuePolicyRequest request = (SaveFederationQueuePolicyRequest) obj;
return SaveFederationQueuePolicyResponse.newInstance("success");
});
when(admin.listFederationQueuePolicies(any(QueryFederationQueuePoliciesRequest.class)))
.thenAnswer((Answer<QueryFederationQueuePoliciesResponse>) invocationOnMock -> {
// Step1. parse request.
Object obj = invocationOnMock.getArgument(0);
QueryFederationQueuePoliciesRequest request = (QueryFederationQueuePoliciesRequest) obj;
String queue = request.getQueue();
List<FederationQueueWeight> weights = new ArrayList<>();
FederationQueueWeight weight = FederationQueueWeight.newInstance(
"SC-1:0.8,SC-2:0.2", "SC-1:0.6,SC-2:0.4", "1", queue, "test");
weights.add(weight);
return QueryFederationQueuePoliciesResponse.newInstance(1, 1, 1, 10, weights);
});
when(admin.getFederationSubClusters(any(GetSubClustersRequest.class)))
.thenAnswer((Answer<GetSubClustersResponse>) invocationOnMock -> {
// Step1. parse request.
List<FederationSubCluster> subClustersList = new ArrayList<>();
// Add SC-1
FederationSubCluster subCluster1 = FederationSubCluster.newInstance("SC-1",
"RUNNING", new Date().toString());
// Add SC-2
FederationSubCluster subCluster2 = FederationSubCluster.newInstance("SC-2",
"RUNNING", new Date().toString());
subClustersList.add(subCluster1);
subClustersList.add(subCluster2);
return GetSubClustersResponse.newInstance(subClustersList);
});
when(admin.deleteFederationPoliciesByQueues(any(DeleteFederationQueuePoliciesRequest.class)))
.thenAnswer((Answer<DeleteFederationQueuePoliciesResponse>) invocationOnMock -> {
// Step1. parse request.
Object obj = invocationOnMock.getArgument(0);
DeleteFederationQueuePoliciesRequest request = (DeleteFederationQueuePoliciesRequest) obj;
List<String> queues = request.getQueues();
return DeleteFederationQueuePoliciesResponse.newInstance("queues = " +
StringUtils.join(queues, ",") + " delete success.");
});
Configuration config = new Configuration();
config.setBoolean(YarnConfiguration.FEDERATION_ENABLED, true);
rmAdminCLI = new RouterCLI(config) {
@Override
protected ResourceManagerAdministrationProtocol createAdminProtocol() {
return admin;
}
};
}
private DeregisterSubClusterResponse generateSubClusterDataBySCId(String subClusterId) {
// Step2. generate return data.
String lastHeartBeatTime = new Date().toString();
DeregisterSubClusters deregisterSubClusters =
DeregisterSubClusters.newInstance(subClusterId, "SUCCESS", lastHeartBeatTime,
"Heartbeat Time > 30 minutes", "SC_LOST");
List<DeregisterSubClusters> deregisterSubClusterList = new ArrayList<>();
deregisterSubClusterList.add(deregisterSubClusters);
// Step3. return data.
return DeregisterSubClusterResponse.newInstance(deregisterSubClusterList);
}
private DeregisterSubClusterResponse generateAllSubClusterData() {
List<DeregisterSubClusters> deregisterSubClusterList = new ArrayList<>();
for (int i = 1; i <= SUBCLUSTER_NUM; i++) {
String subClusterId = "SC-" + i;
String lastHeartBeatTime = new Date().toString();
DeregisterSubClusters deregisterSubClusters =
DeregisterSubClusters.newInstance(subClusterId, "SUCCESS", lastHeartBeatTime,
"Heartbeat Time > 30 minutes", "SC_LOST");
deregisterSubClusterList.add(deregisterSubClusters);
}
return DeregisterSubClusterResponse.newInstance(deregisterSubClusterList);
}
@Test
public void testHelp() throws Exception {
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
System.setErr(new PrintStream(dataErr));
String[] args = {"-help"};
rmAdminCLI.run(args);
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-help", "-deregisterSubCluster"};
rmAdminCLI.run(args);
args = new String[]{"-help", "-policy"};
rmAdminCLI.run(args);
}
@Test
public void testDeregisterSubCluster() throws Exception {
PrintStream oldOutPrintStream = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
oldOutPrintStream.println(dataOut);
String[] args = {"-deregisterSubCluster", "-sc", "SC-1"};
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-deregisterSubCluster", "--subClusterId", "SC-1"};
assertEquals(0, rmAdminCLI.run(args));
}
@Test
public void testDeregisterSubClusters() throws Exception {
PrintStream oldOutPrintStream = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
oldOutPrintStream.println(dataOut);
String[] args = {"-deregisterSubCluster"};
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-deregisterSubCluster", "-sc"};
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-deregisterSubCluster", "--sc", ""};
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-deregisterSubCluster", "--subClusterId"};
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-deregisterSubCluster", "--subClusterId", ""};
assertEquals(0, rmAdminCLI.run(args));
}
@Test
public void testParsePolicy() throws Exception {
// Case1, If policy is empty.
String errMsg1 = "The policy cannot be empty or the policy is incorrect. \n" +
" Required information to provide: queue,router weight,amrm weight,headroomalpha \n" +
" eg. root.a;SC-1:0.7,SC-2:0.3;SC-1:0.7,SC-2:0.3;1.0";
LambdaTestUtils.intercept(YarnException.class, errMsg1, () -> rmAdminCLI.parsePolicy(""));
// Case2, If policy is incomplete, We need 4 items, but only 2 of them are provided.
LambdaTestUtils.intercept(YarnException.class, errMsg1,
() -> rmAdminCLI.parsePolicy("root.a;SC-1:0.1,SC-2:0.9;"));
// Case3, If policy is incomplete, The weight of a subcluster is missing.
String errMsg2 = "The subClusterWeight cannot be empty, " +
"and the subClusterWeight size must be 2. (eg.SC-1,0.2)";
LambdaTestUtils.intercept(YarnException.class, errMsg2,
() -> rmAdminCLI.parsePolicy("root.a;SC-1:0.1,SC-2;SC-1:0.1,SC-2;0.3,1.0"));
// Case4, The policy is complete, but the sum of weights for each subcluster is not equal to 1.
String errMsg3 = "The sum of ratios for all subClusters must be equal to 1.";
LambdaTestUtils.intercept(YarnException.class, errMsg3,
() -> rmAdminCLI.parsePolicy("root.a;SC-1:0.1,SC-2:0.8;SC-1:0.1,SC-2;0.3,1.0"));
// If policy is root.a;SC-1:0.7,SC-2:0.3;SC-1:0.7,SC-2:0.3;1.0
String policy = "root.a;SC-1:0.7,SC-2:0.3;SC-1:0.6,SC-2:0.4;1.0";
SaveFederationQueuePolicyRequest request = rmAdminCLI.parsePolicy(policy);
FederationQueueWeight federationQueueWeight = request.getFederationQueueWeight();
assertNotNull(federationQueueWeight);
assertEquals("SC-1:0.7,SC-2:0.3", federationQueueWeight.getRouterWeight());
assertEquals("SC-1:0.6,SC-2:0.4", federationQueueWeight.getAmrmWeight());
assertEquals("1.0", federationQueueWeight.getHeadRoomAlpha());
}
@Test
public void testSavePolicy() throws Exception {
PrintStream oldOutPrintStream = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
oldOutPrintStream.println(dataOut);
String[] args = {"-policy", "-s", "root.a;SC-1:0.1,SC-2:0.9;SC-1:0.7,SC-2:0.3;1.0"};
assertEquals(0, rmAdminCLI.run(args));
args = new String[]{"-policy", "-save", "root.a;SC-1:0.1,SC-2:0.9;SC-1:0.7,SC-2:0.3;1.0"};
assertEquals(0, rmAdminCLI.run(args));
}
@Test
public void testParsePoliciesByXml() throws Exception {
String filePath =
TestRouterCLI.class.getClassLoader().getResource("federation-weights.xml").getFile();
List<FederationQueueWeight> federationQueueWeights = rmAdminCLI.parsePoliciesByXml(filePath);
assertNotNull(federationQueueWeights);
assertEquals(2, federationQueueWeights.size());
// Queue1: root.a
FederationQueueWeight queueWeight1 = federationQueueWeights.get(0);
assertNotNull(queueWeight1);
assertEquals("root.a", queueWeight1.getQueue());
assertEquals("SC-1:0.7,SC-2:0.3", queueWeight1.getAmrmWeight());
assertEquals("SC-1:0.6,SC-2:0.4", queueWeight1.getRouterWeight());
// Queue2: root.b
FederationQueueWeight queueWeight2 = federationQueueWeights.get(1);
assertNotNull(queueWeight2);
assertEquals("root.b", queueWeight2.getQueue());
assertEquals("SC-1:0.8,SC-2:0.2", queueWeight2.getAmrmWeight());
assertEquals("SC-1:0.6,SC-2:0.4", queueWeight2.getRouterWeight());
}
@Test
public void testListPolicies() throws Exception {
PrintStream oldOutPrintStream = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
oldOutPrintStream.println(dataOut);
String[] args = {"-policy", "-l", "--queue", "root.a"};
assertEquals(0, rmAdminCLI.run(args));
}
@Test
public void testBuildHelpMsg() throws Exception {
Map<String, RouterCLI.RouterCmdUsageInfos> adminUsage = rmAdminCLI.getAdminUsage();
assertEquals(3, adminUsage.size());
RouterCLI.RouterCmdUsageInfos subClusterUsageInfos = adminUsage.get("-subCluster");
assertNotNull(subClusterUsageInfos);
Map<String, List<String>> dsExamplesMap = subClusterUsageInfos.getExamples();
assertNotNull(dsExamplesMap);
assertEquals(2, dsExamplesMap.size());
List<String> dsExamples = dsExamplesMap.get("-deregisterSubCluster <-sc|--subClusterId>");
assertNotNull(dsExamples);
assertEquals(2, dsExamples.size());
List<String> getSubClustersExamples = dsExamplesMap.get("-getSubClusters");
assertNotNull(getSubClustersExamples);
assertEquals(1, getSubClustersExamples.size());
RouterCLI.RouterCmdUsageInfos policyUsageInfos = adminUsage.get("-policy");
assertNotNull(policyUsageInfos);
Map<String, List<String>> policyExamplesMap = policyUsageInfos.getExamples();
assertNotNull(policyExamplesMap);
assertEquals(4, policyExamplesMap.size());
policyExamplesMap.forEach((cmd, cmdExamples) -> {
assertEquals(2, cmdExamples.size());
});
RouterCLI.RouterCmdUsageInfos applicationUsageInfos = adminUsage.get("-application");
assertNotNull(applicationUsageInfos);
Map<String, List<String>> applicationExamplesMap = applicationUsageInfos.getExamples();
assertNotNull(applicationExamplesMap);
assertEquals(1, applicationExamplesMap.size());
}
@Test
public void testGetSubClusters() throws Exception {
PrintStream oldOutPrintStream = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
oldOutPrintStream.println(dataOut);
String[] args = {"-subCluster", "-getSubClusters"};
assertEquals(0, rmAdminCLI.run(args));
}
@Test
public void testDeleteFederationPoliciesByQueues() throws Exception {
PrintStream oldOutPrintStream = System.out;
ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
System.setOut(new PrintStream(dataOut));
oldOutPrintStream.println(dataOut);
String[] args = {"-policy", "-d", "--queue", "root.a"};
assertEquals(0, rmAdminCLI.run(args));
}
}
| TestRouterCLI |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/protocol/Encoders.java | {
"start": 6414,
"end": 7139
} | class ____ {
public static int encodedLength(RoaringBitmap[] bitmaps) {
int totalLength = 4;
for (RoaringBitmap b : bitmaps) {
totalLength += Bitmaps.encodedLength(b);
}
return totalLength;
}
public static void encode(ByteBuf buf, RoaringBitmap[] bitmaps) {
buf.writeInt(bitmaps.length);
for (RoaringBitmap b : bitmaps) {
Bitmaps.encode(buf, b);
}
}
public static RoaringBitmap[] decode(ByteBuf buf) {
int numBitmaps = buf.readInt();
RoaringBitmap[] bitmaps = new RoaringBitmap[numBitmaps];
for (int i = 0; i < bitmaps.length; i ++) {
bitmaps[i] = Bitmaps.decode(buf);
}
return bitmaps;
}
}
}
| BitmapArrays |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/StructWithArrayEmbeddableTest.java | {
"start": 3339,
"end": 22940
} | class ____ implements AdditionalMappingContributor {
@Override
public void contribute(
AdditionalMappingContributions contributions,
InFlightMetadataCollector metadata,
ResourceStreamLocator resourceStreamLocator,
MetadataBuildingContext buildingContext) {
final Namespace namespace = new Namespace(
PhysicalNamingStrategyStandardImpl.INSTANCE,
null,
new Namespace.Name( null, null )
);
//---------------------------------------------------------
// PostgreSQL
//---------------------------------------------------------
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgreSQL structFunction",
namespace,
"create function structFunction() returns structType as $$ declare result structType; begin result.theBinary = array[bytea '\\x01']; result.theString = array['ABC']; result.theDouble = array[0]; result.theInt = array[0]; result.theLocalDateTime = array[timestamp '2022-12-01 01:00:00']; result.theUuid = array['53886a8a-7082-4879-b430-25cb94415be8'::uuid]; return result; end $$ language plpgsql",
"drop function structFunction",
Set.of( PostgreSQLDialect.class.getName() )
)
);
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgreSQL structProcedure",
namespace,
"create procedure structProcedure(INOUT result structType) AS $$ declare res structType; begin res.theBinary = array[bytea '\\x01']; res.theString = array['ABC']; res.theDouble = array[0]; res.theInt = array[0]; res.theLocalDateTime = array[timestamp '2022-12-01 01:00:00']; res.theUuid = array['53886a8a-7082-4879-b430-25cb94415be8'::uuid]; result = res; end $$ language plpgsql",
"drop procedure structProcedure",
Set.of( PostgreSQLDialect.class.getName() )
)
);
//---------------------------------------------------------
// PostgresPlus
//---------------------------------------------------------
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgrePlus structFunction",
namespace,
"create function structFunction() returns structType as $$ declare result structType; begin result.theBinary = array[bytea '\\x01']; result.theString = array['ABC']; result.theDouble = array[0]; result.theInt = array[0]; result.theLocalDateTime = array[timestamp '2022-12-01 01:00:00']; result.theUuid = array['53886a8a-7082-4879-b430-25cb94415be8'::uuid]; return result; end $$ language plpgsql",
"drop function structFunction",
Set.of( PostgresPlusDialect.class.getName() )
)
);
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgrePlus structProcedure",
namespace,
"create procedure structProcedure(result INOUT structType) AS $$ declare res structType; begin res.theBinary = array[bytea '\\x01']; res.theString = array['ABC']; res.theDouble = array[0]; res.theInt = array[0]; res.theLocalDateTime = array[timestamp '2022-12-01 01:00:00']; res.theUuid = array['53886a8a-7082-4879-b430-25cb94415be8'::uuid]; result = res; end $$ language plpgsql",
"drop procedure structProcedure",
Set.of( PostgresPlusDialect.class.getName() )
)
);
//---------------------------------------------------------
// Oracle
//---------------------------------------------------------
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"Oracle structFunction",
namespace,
"create function structFunction return structType is result structType; begin " +
"result := structType(" +
"theBinary => byteArrayArray( hextoraw('01') )," +
"theString => StringArray( 'ABC' )," +
"theDouble => DoubleArray( 0 )," +
"theInt => IntegerArray( 0 )," +
"theLocalDateTime => LocalDateTimeTimestampArray( timestamp '2022-12-01 01:00:00' )," +
"theUuid => UUIDbyteArrayArray( hextoraw('53886a8a70824879b43025cb94415be8') )," +
"converted_gender => null," +
"gender => null," +
"mutableValue => null," +
"ordinal_gender => null," +
"theBoolean => null," +
"theClob => null," +
"theDate => null," +
"theDuration => null," +
"theInstant => null," +
"theInteger => null," +
"theLocalDate => null," +
"theLocalTime => null," +
"theNumericBoolean => null," +
"theOffsetDateTime => null," +
"theStringBoolean => null," +
"theTime => null," +
"theTimestamp => null," +
"theUrl => null," +
"theZonedDateTime => null" +
"); return result; end;",
"drop function structFunction",
Set.of( OracleDialect.class.getName() )
)
);
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"Oracle structProcedure",
namespace,
"create procedure structProcedure(result OUT structType) AS begin " +
"result := structType(" +
"theBinary => byteArrayArray( hextoraw('01') )," +
"theString => StringArray( 'ABC' )," +
"theDouble => DoubleArray( 0 )," +
"theInt => IntegerArray( 0 )," +
"theLocalDateTime => LocalDateTimeTimestampArray( timestamp '2022-12-01 01:00:00' )," +
"theUuid => UUIDbyteArrayArray( hextoraw('53886a8a70824879b43025cb94415be8') )," +
"converted_gender => null," +
"gender => null," +
"mutableValue => null," +
"ordinal_gender => null," +
"theBoolean => null," +
"theClob => null," +
"theDate => null," +
"theDuration => null," +
"theInstant => null," +
"theInteger => null," +
"theLocalDate => null," +
"theLocalTime => null," +
"theNumericBoolean => null," +
"theOffsetDateTime => null," +
"theStringBoolean => null," +
"theTime => null," +
"theTimestamp => null," +
"theUrl => null," +
"theZonedDateTime => null" +
"); end;",
"drop procedure structProcedure",
Set.of( OracleDialect.class.getName() )
)
);
}
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist( new StructHolder( 1L, EmbeddableWithArrayAggregate.createAggregate1() ) );
session.persist( new StructHolder( 2L, EmbeddableWithArrayAggregate.createAggregate2() ) );
}
);
}
@AfterEach
protected void cleanupTest(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testUpdate(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
StructHolder structHolder = entityManager.find( StructHolder.class, 1L );
structHolder.setAggregate( EmbeddableWithArrayAggregate.createAggregate2() );
entityManager.flush();
entityManager.clear();
assertStructEquals( EmbeddableWithArrayAggregate.createAggregate2(), entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testFetch(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<StructHolder> structHolders = entityManager.createQuery( "from StructHolder b where b.id = 1", StructHolder.class ).getResultList();
assertEquals( 1, structHolders.size() );
assertEquals( 1L, structHolders.get( 0 ).getId() );
assertStructEquals( EmbeddableWithArrayAggregate.createAggregate1(), structHolders.get( 0 ).getAggregate() );
}
);
}
@Test
public void testFetchNull(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<StructHolder> structHolders = entityManager.createQuery( "from StructHolder b where b.id = 2", StructHolder.class ).getResultList();
assertEquals( 1, structHolders.size() );
assertEquals( 2L, structHolders.get( 0 ).getId() );
assertStructEquals( EmbeddableWithArrayAggregate.createAggregate2(), structHolders.get( 0 ).getAggregate() );
}
);
}
@Test
public void testDomainResult(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<EmbeddableWithArrayAggregate> structs = entityManager.createQuery( "select b.aggregate from StructHolder b where b.id = 1", EmbeddableWithArrayAggregate.class ).getResultList();
assertEquals( 1, structs.size() );
assertStructEquals( EmbeddableWithArrayAggregate.createAggregate1(), structs.get( 0 ) );
}
);
}
@Test
@SkipForDialect( dialectClass = OracleDialect.class, reason = "External driver fix required")
public void testSelectionItems(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<Tuple> tuples = entityManager.createQuery(
"select " +
"b.aggregate.theInt," +
"b.aggregate.theDouble," +
"b.aggregate.theBoolean," +
"b.aggregate.theNumericBoolean," +
"b.aggregate.theStringBoolean," +
"b.aggregate.theString," +
"b.aggregate.theInteger," +
"b.aggregate.theUrl," +
"b.aggregate.theClob," +
"b.aggregate.theBinary," +
"b.aggregate.theDate," +
"b.aggregate.theTime," +
"b.aggregate.theTimestamp," +
"b.aggregate.theInstant," +
"b.aggregate.theUuid," +
"b.aggregate.gender," +
"b.aggregate.convertedGender," +
"b.aggregate.ordinalGender," +
"b.aggregate.theDuration," +
"b.aggregate.theLocalDateTime," +
"b.aggregate.theLocalDate," +
"b.aggregate.theLocalTime," +
"b.aggregate.theZonedDateTime," +
"b.aggregate.theOffsetDateTime," +
"b.aggregate.mutableValue " +
"from StructHolder b where b.id = 1",
Tuple.class
).getResultList();
assertEquals( 1, tuples.size() );
final Tuple tuple = tuples.get( 0 );
final EmbeddableWithArrayAggregate struct = new EmbeddableWithArrayAggregate();
struct.setTheInt( tuple.get( 0, int[].class ) );
struct.setTheDouble( tuple.get( 1, double[].class ) );
struct.setTheBoolean( tuple.get( 2, Boolean[].class ) );
struct.setTheNumericBoolean( tuple.get( 3, Boolean[].class ) );
struct.setTheStringBoolean( tuple.get( 4, Boolean[].class ) );
struct.setTheString( tuple.get( 5, String[].class ) );
struct.setTheInteger( tuple.get( 6, Integer[].class ) );
struct.setTheUrl( tuple.get( 7, URL[].class ) );
struct.setTheClob( tuple.get( 8, String[].class ) );
struct.setTheBinary( tuple.get( 9, byte[][].class ) );
struct.setTheDate( tuple.get( 10, Date[].class ) );
struct.setTheTime( tuple.get( 11, Time[].class ) );
struct.setTheTimestamp( tuple.get( 12, Timestamp[].class ) );
struct.setTheInstant( tuple.get( 13, Instant[].class ) );
struct.setTheUuid( tuple.get( 14, UUID[].class ) );
struct.setGender( tuple.get( 15, EntityOfBasics.Gender[].class ) );
struct.setConvertedGender( tuple.get( 16, EntityOfBasics.Gender[].class ) );
struct.setOrdinalGender( tuple.get( 17, EntityOfBasics.Gender[].class ) );
struct.setTheDuration( tuple.get( 18, Duration[].class ) );
struct.setTheLocalDateTime( tuple.get( 19, LocalDateTime[].class ) );
struct.setTheLocalDate( tuple.get( 20, LocalDate[].class ) );
struct.setTheLocalTime( tuple.get( 21, LocalTime[].class ) );
struct.setTheZonedDateTime( tuple.get( 22, ZonedDateTime[].class ) );
struct.setTheOffsetDateTime( tuple.get( 23, OffsetDateTime[].class ) );
struct.setMutableValue( tuple.get( 24, MutableValue[].class ) );
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate1(), struct );
}
);
}
@Test
public void testDeleteWhere(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "delete StructHolder b where b.aggregate is not null" ).executeUpdate();
assertNull( entityManager.find( StructHolder.class, 1L ) );
}
);
}
@Test
public void testUpdateAggregate(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update StructHolder b set b.aggregate = null" ).executeUpdate();
assertNull( entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testUpdateAggregateMember(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update StructHolder b set b.aggregate.theString = null" ).executeUpdate();
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate1();
struct.setTheString( null );
assertStructEquals( struct, entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testUpdateMultipleAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update StructHolder b set b.aggregate.theString = null, b.aggregate.theUuid = null" ).executeUpdate();
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate1();
struct.setTheString( null );
struct.setTheUuid( null );
assertStructEquals( struct, entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testUpdateAllAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate1();
entityManager.createMutationQuery(
"update StructHolder b set " +
"b.aggregate.theInt = :theInt," +
"b.aggregate.theDouble = :theDouble," +
"b.aggregate.theBoolean = :theBoolean," +
"b.aggregate.theNumericBoolean = :theNumericBoolean," +
"b.aggregate.theStringBoolean = :theStringBoolean," +
"b.aggregate.theString = :theString," +
"b.aggregate.theInteger = :theInteger," +
"b.aggregate.theUrl = :theUrl," +
"b.aggregate.theClob = :theClob," +
"b.aggregate.theBinary = :theBinary," +
"b.aggregate.theDate = :theDate," +
"b.aggregate.theTime = :theTime," +
"b.aggregate.theTimestamp = :theTimestamp," +
"b.aggregate.theInstant = :theInstant," +
"b.aggregate.theUuid = :theUuid," +
"b.aggregate.gender = :gender," +
"b.aggregate.convertedGender = :convertedGender," +
"b.aggregate.ordinalGender = :ordinalGender," +
"b.aggregate.theDuration = :theDuration," +
"b.aggregate.theLocalDateTime = :theLocalDateTime," +
"b.aggregate.theLocalDate = :theLocalDate," +
"b.aggregate.theLocalTime = :theLocalTime," +
"b.aggregate.theZonedDateTime = :theZonedDateTime," +
"b.aggregate.theOffsetDateTime = :theOffsetDateTime," +
"b.aggregate.mutableValue = :mutableValue " +
"where b.id = 2"
)
.setParameter( "theInt", struct.getTheInt() )
.setParameter( "theDouble", struct.getTheDouble() )
.setParameter( "theBoolean", struct.getTheBoolean() )
.setParameter( "theNumericBoolean", struct.getTheNumericBoolean() )
.setParameter( "theStringBoolean", struct.getTheStringBoolean() )
.setParameter( "theString", struct.getTheString() )
.setParameter( "theInteger", struct.getTheInteger() )
.setParameter( "theUrl", struct.getTheUrl() )
.setParameter( "theClob", struct.getTheClob() )
.setParameter( "theBinary", struct.getTheBinary() )
.setParameter( "theDate", struct.getTheDate() )
.setParameter( "theTime", struct.getTheTime() )
.setParameter( "theTimestamp", struct.getTheTimestamp() )
.setParameter( "theInstant", struct.getTheInstant() )
.setParameter( "theUuid", struct.getTheUuid() )
.setParameter( "gender", struct.getGender() )
.setParameter( "convertedGender", struct.getConvertedGender() )
.setParameter( "ordinalGender", struct.getOrdinalGender() )
.setParameter( "theDuration", struct.getTheDuration() )
.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
.setParameter( "theLocalDate", struct.getTheLocalDate() )
.setParameter( "theLocalTime", struct.getTheLocalTime() )
.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
.setParameter( "mutableValue", struct.getMutableValue() )
.executeUpdate();
assertStructEquals( EmbeddableWithArrayAggregate.createAggregate1(), entityManager.find( StructHolder.class, 2L ).getAggregate() );
}
);
}
@Test
public void testNativeQuery(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
List<Object> resultList = entityManager.createNativeQuery(
"select b.aggregate from StructHolder b where b.id = 1", Object.class
)
.getResultList();
assertEquals( 1, resultList.size() );
assertInstanceOf( EmbeddableWithArrayAggregate.class, resultList.get( 0 ) );
EmbeddableWithArrayAggregate struct = (EmbeddableWithArrayAggregate) resultList.get( 0 );
assertStructEquals( EmbeddableWithArrayAggregate.createAggregate1(), struct );
}
);
}
@Test
public void testFunction(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
ProcedureCall structFunction = entityManager.createStoredProcedureCall( "structFunction" )
.markAsFunctionCall( EmbeddableWithArrayAggregate.class );
//noinspection unchecked
final List<Object> resultList = structFunction.getResultList();
assertEquals( 1, resultList.size() );
assertInstanceOf( EmbeddableWithArrayAggregate.class, resultList.get( 0 ) );
EmbeddableWithArrayAggregate result = (EmbeddableWithArrayAggregate) resultList.get( 0 );
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate3();
assertStructEquals( struct, result );
}
);
}
@Test
public void testProcedure(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
final Dialect dialect = entityManager.getJdbcServices().getDialect();
final ParameterMode parameterMode;
if ( dialect instanceof PostgreSQLDialect ) {
parameterMode = ParameterMode.INOUT;
}
else {
parameterMode = ParameterMode.OUT;
}
ProcedureCall structFunction = entityManager.createStoredProcedureCall( "structProcedure" );
ProcedureParameter<EmbeddableWithArrayAggregate> resultParameter = structFunction.registerParameter(
"result",
EmbeddableWithArrayAggregate.class,
parameterMode
);
structFunction.setParameter( resultParameter, null );
EmbeddableWithArrayAggregate result = structFunction.getOutputs().getOutputParameterValue( resultParameter );
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate3();
assertStructEquals( struct, result );
}
);
}
private static void assertStructEquals(EmbeddableWithArrayAggregate struct, EmbeddableWithArrayAggregate struct2) {
assertArrayEquals( struct.getTheBinary(), struct2.getTheBinary() );
assertArrayEquals( struct.getTheString(), struct2.getTheString() );
assertArrayEquals( struct.getTheLocalDateTime(), struct2.getTheLocalDateTime() );
assertArrayEquals( struct.getTheUuid(), struct2.getTheUuid() );
}
@Entity(name = "StructHolder")
public static | StructWithArrayEmbeddableTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/model/domain/internal/BagAttributeImpl.java | {
"start": 618,
"end": 1295
} | class ____<X, E>
extends AbstractPluralAttribute<X, Collection<E>, E>
implements SqmBagPersistentAttribute<X, E> {
public BagAttributeImpl(PluralAttributeBuilder<X, Collection<E>, E, ?> xceBuilder) {
super( xceBuilder );
}
@Override
public CollectionType getCollectionType() {
return CollectionType.COLLECTION;
}
@Override
public SqmAttributeJoin<X,E> createSqmJoin(
SqmFrom<?,X> lhs,
SqmJoinType joinType,
@Nullable String alias,
boolean fetched,
SqmCreationState creationState) {
return new SqmBagJoin<>(
lhs,
this,
alias,
joinType,
fetched,
creationState.getCreationContext().getNodeBuilder()
);
}
}
| BagAttributeImpl |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/bind/ServletRequestDataBinder.java | {
"start": 8704,
"end": 10996
} | class ____ implements ValueResolver {
private final ServletRequest request;
private final WebDataBinder dataBinder;
private @Nullable Set<String> parameterNames;
protected ServletRequestValueResolver(ServletRequest request, WebDataBinder dataBinder) {
this.request = request;
this.dataBinder = dataBinder;
}
protected ServletRequest getRequest() {
return this.request;
}
@Override
public final @Nullable Object resolveValue(String name, Class<?> paramType) {
Object value = getRequestParameter(name, paramType);
if (value == null) {
value = this.dataBinder.resolvePrefixValue(name, paramType, this::getRequestParameter);
}
if (value == null) {
value = getMultipartValue(name);
}
return value;
}
protected @Nullable Object getRequestParameter(String name, Class<?> type) {
Object value = this.request.getParameterValues(name);
return (ObjectUtils.isArray(value) && Array.getLength(value) == 1 ? Array.get(value, 0) : value);
}
private @Nullable Object getMultipartValue(String name) {
MultipartRequest multipartRequest = WebUtils.getNativeRequest(this.request, MultipartRequest.class);
if (multipartRequest != null) {
List<MultipartFile> files = multipartRequest.getFiles(name);
if (!files.isEmpty()) {
return (files.size() == 1 ? files.get(0) : files);
}
}
else if (isFormDataPost(this.request)) {
HttpServletRequest httpRequest = WebUtils.getNativeRequest(this.request, HttpServletRequest.class);
if (httpRequest != null && HttpMethod.POST.matches(httpRequest.getMethod())) {
List<Part> parts = StandardServletPartUtils.getParts(httpRequest, name);
if (!parts.isEmpty()) {
return (parts.size() == 1 ? parts.get(0) : parts);
}
}
}
return null;
}
@Override
public Set<String> getNames() {
if (this.parameterNames == null) {
this.parameterNames = initParameterNames(this.request);
}
return this.parameterNames;
}
protected Set<String> initParameterNames(ServletRequest request) {
Set<String> set = new LinkedHashSet<>();
Enumeration<String> enumeration = request.getParameterNames();
while (enumeration.hasMoreElements()) {
set.add(enumeration.nextElement());
}
return set;
}
}
}
| ServletRequestValueResolver |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/json/JacksonTesterTests.java | {
"start": 1081,
"end": 2458
} | class ____ extends AbstractJsonMarshalTesterTests {
@Test
@SuppressWarnings("NullAway") // Test null check
void initFieldsWhenTestIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> JacksonTester.initFields(null, new JsonMapper()))
.withMessageContaining("'testInstance' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void initFieldsWhenMarshallerIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> JacksonTester.initFields(new InitFieldsTestClass(), (JsonMapper) null))
.withMessageContaining("'marshaller' must not be null");
}
@Test
void initFieldsShouldSetNullFields() {
InitFieldsTestClass test = new InitFieldsTestClass();
assertThat(test.test).isNull();
assertThat(test.base).isNull();
JacksonTester.initFields(test, new JsonMapper());
assertThat(test.test).isNotNull();
assertThat(test.base).isNotNull();
ResolvableType type = test.test.getType();
assertThat(type).isNotNull();
assertThat(type.resolve()).isEqualTo(List.class);
assertThat(type.resolveGeneric()).isEqualTo(ExampleObject.class);
}
@Override
protected AbstractJsonMarshalTester<Object> createTester(Class<?> resourceLoadClass, ResolvableType type) {
return new JacksonTester<>(resourceLoadClass, type, new JsonMapper());
}
abstract static | JacksonTesterTests |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/bean/types/GenericBeanTypesTest.java | {
"start": 6296,
"end": 6485
} | class ____<T extends Comparable<T>> implements Iterable<T> {
@Override
public Iterator<T> iterator() {
return null;
}
}
@Singleton
static | MyBean |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/exceptionhandling/ConstraintViolationExceptionHandlingTest.java | {
"start": 3313,
"end": 3447
} | class ____ {
@Id
private long id;
@ManyToOne(optional = false)
private AInfo aInfo;
}
@Entity(name = "AInfo")
public static | A |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java | {
"start": 5675,
"end": 6715
} | class ____ {
double originLat;
double originLon;
double offset;
double scaling;
public DecayGeoGauss(String originStr, String scaleStr, String offsetStr, double decay) {
GeoPoint origin = GeoUtils.parseGeoPoint(originStr, false);
double scale = DistanceUnit.DEFAULT.parse(scaleStr, DistanceUnit.DEFAULT);
this.originLat = origin.lat();
this.originLon = origin.lon();
this.offset = DistanceUnit.DEFAULT.parse(offsetStr, DistanceUnit.DEFAULT);
this.scaling = 0.5 * Math.pow(scale, 2.0) / Math.log(decay);
}
public double decayGeoGauss(GeoPoint docValue) {
double distance = GeoDistance.ARC.calculate(originLat, originLon, docValue.lat(), docValue.lon(), DistanceUnit.METERS);
distance = Math.max(0.0d, distance - offset);
return Math.exp(0.5 * Math.pow(distance, 2.0) / scaling);
}
}
// **** Decay functions on numeric field
public static final | DecayGeoGauss |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/persister/collection/mutation/OperationProducer.java | {
"start": 521,
"end": 632
} | interface ____ {
JdbcMutationOperation createOperation(MutatingTableReference tableReference);
}
| OperationProducer |
java | elastic__elasticsearch | x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/content/ObjectParser.java | {
"start": 1569,
"end": 1898
} | interface ____<Value, Context> {
void acceptUnknownField(
ObjectParser<Value, Context> objectParser,
String field,
JsonLocation location,
JsonParser parser,
Value value,
Context context
) throws IOException;
}
private | UnknownFieldParser |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 5898,
"end": 6080
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.getSequenceSupport().supportsSequences();
}
}
public static | SupportsSequences |
java | google__guava | android/guava-tests/test/com/google/common/collect/UnmodifiableIteratorTest.java | {
"start": 1028,
"end": 1746
} | class ____ extends TestCase {
@SuppressWarnings("DoNotCall")
public void testRemove() {
String[] array = {"a", "b", "c"};
Iterator<String> iterator =
new UnmodifiableIterator<String>() {
int i;
@Override
public boolean hasNext() {
return i < array.length;
}
@Override
public String next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
return array[i++];
}
};
assertTrue(iterator.hasNext());
assertEquals("a", iterator.next());
assertThrows(UnsupportedOperationException.class, () -> iterator.remove());
}
}
| UnmodifiableIteratorTest |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/entities/onetomany/detached/IndexedListJoinColumnBidirectionalRefIngEntity.java | {
"start": 860,
"end": 3038
} | class ____ {
@Id
@GeneratedValue
private Integer id;
private String data;
@OneToMany
@JoinColumn(name = "indexed_join_column")
@OrderColumn(name = "indexed_index")
@AuditMappedBy(mappedBy = "owner", positionMappedBy = "position")
private List<IndexedListJoinColumnBidirectionalRefEdEntity> references;
public IndexedListJoinColumnBidirectionalRefIngEntity() {
}
public IndexedListJoinColumnBidirectionalRefIngEntity(
Integer id,
String data,
IndexedListJoinColumnBidirectionalRefEdEntity... references) {
this.id = id;
this.data = data;
this.references = new ArrayList<IndexedListJoinColumnBidirectionalRefEdEntity>();
this.references.addAll( Arrays.asList( references ) );
}
public IndexedListJoinColumnBidirectionalRefIngEntity(
String data,
IndexedListJoinColumnBidirectionalRefEdEntity... references) {
this( null, data, references );
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public List<IndexedListJoinColumnBidirectionalRefEdEntity> getReferences() {
return references;
}
public void setReferences(List<IndexedListJoinColumnBidirectionalRefEdEntity> references) {
this.references = references;
}
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof IndexedListJoinColumnBidirectionalRefIngEntity) ) {
return false;
}
IndexedListJoinColumnBidirectionalRefIngEntity that = (IndexedListJoinColumnBidirectionalRefIngEntity) o;
if ( data != null ? !data.equals( that.data ) : that.data != null ) {
return false;
}
//noinspection RedundantIfStatement
if ( id != null ? !id.equals( that.id ) : that.id != null ) {
return false;
}
return true;
}
public int hashCode() {
int result;
result = (id != null ? id.hashCode() : 0);
result = 31 * result + (data != null ? data.hashCode() : 0);
return result;
}
public String toString() {
return "IndexedListJoinColumnBidirectionalRefIngEntity(id = " + id + ", data = " + data + ")";
}
}
| IndexedListJoinColumnBidirectionalRefIngEntity |
java | quarkusio__quarkus | extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/QuarkusFlywayClassProvider.java | {
"start": 148,
"end": 532
} | class ____<I> implements ClassProvider<I> {
private final Collection<Class<? extends I>> classes;
public QuarkusFlywayClassProvider(Collection<Class<? extends I>> classes) {
this.classes = Collections.unmodifiableCollection(classes);
}
@Override
public Collection<Class<? extends I>> getClasses() {
return classes;
}
}
| QuarkusFlywayClassProvider |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2195/dto/Target.java | {
"start": 202,
"end": 403
} | class ____ extends TargetBase {
protected Target(Builder builder) {
super( builder );
}
public static Builder builder() {
return new Builder();
}
public static | Target |
java | quarkusio__quarkus | integration-tests/vertx-http/src/test/java/io/quarkus/it/vertx/ServerWithTLS13Only.java | {
"start": 110,
"end": 321
} | class ____ implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Map.of(
"quarkus.tls.protocols", "TLSv1.3");
}
}
| ServerWithTLS13Only |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/window/utils/WindowJoinHelper.java | {
"start": 8421,
"end": 10003
} | class ____ implements WindowJoinProcessor {
private final boolean isAntiJoin;
public SemiAntiWindowJoinProcessor(boolean isAntiJoin) {
this.isAntiJoin = isAntiJoin;
}
@Override
public void doJoin(
@Nullable Iterable<RowData> leftRecords, @Nullable Iterable<RowData> rightRecords) {
if (leftRecords == null) {
return;
}
if (rightRecords == null) {
if (isAntiJoin) {
for (RowData leftRecord : leftRecords) {
collector.collect(leftRecord);
}
}
return;
}
for (RowData leftRecord : leftRecords) {
boolean matches = false;
for (RowData rightRecord : rightRecords) {
if (joinCondition.apply(leftRecord, rightRecord)) {
matches = true;
break;
}
}
if (matches) {
if (!isAntiJoin) {
// emit left record if there are matched rows on the other side
collector.collect(leftRecord);
}
} else {
if (isAntiJoin) {
// emit left record if there is no matched row on the other side
collector.collect(leftRecord);
}
}
}
}
}
private | SemiAntiWindowJoinProcessor |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/freemarker/FreeMarkerView.java | {
"start": 18581,
"end": 18715
} | class ____ extends {@link GenericServlet}.
* Needed for JSP access in FreeMarker.
*/
@SuppressWarnings("serial")
private static | that |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FunctionalInterfaceClashTest.java | {
"start": 6338,
"end": 6618
} | class ____ {
abstract String doIt(FunctionalInterface fi);
}
""")
.addSourceLines(
"pkg2/DerivedClass.java",
"""
package pkg2;
import pkg1.FunctionalInterface;
public | BaseClass |
java | mapstruct__mapstruct | integrationtest/src/test/resources/sealedSubclassTest/src/main/java/org/mapstruct/itest/sealedsubclass/Harley.java | {
"start": 206,
"end": 411
} | class ____ extends Motor {
private int engineDb;
public int getEngineDb() {
return engineDb;
}
public void setEngineDb(int engineDb) {
this.engineDb = engineDb;
}
}
| Harley |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/engine/DataCacheKeyTest.java | {
"start": 623,
"end": 2108
} | class ____ {
@Rule public final KeyTester keyTester = new KeyTester();
@Mock private Key firstKey;
@Mock private Key firstSignature;
@Mock private Key secondKey;
@Mock private Key secondSignature;
@Before
public void setUp() throws UnsupportedEncodingException {
MockitoAnnotations.initMocks(this);
doAnswer(new WriteDigest("firstKey"))
.when(firstKey)
.updateDiskCacheKey(any(MessageDigest.class));
doAnswer(new WriteDigest("firstSignature"))
.when(firstSignature)
.updateDiskCacheKey(any(MessageDigest.class));
doAnswer(new WriteDigest("secondKey"))
.when(secondKey)
.updateDiskCacheKey(any(MessageDigest.class));
doAnswer(new WriteDigest("secondSignature"))
.when(secondSignature)
.updateDiskCacheKey(any(MessageDigest.class));
}
@Test
public void testEqualsHashCodeDigest() throws NoSuchAlgorithmException {
keyTester
.addEquivalenceGroup(
new DataCacheKey(firstKey, firstSignature), new DataCacheKey(firstKey, firstSignature))
.addEquivalenceGroup(new DataCacheKey(firstKey, secondSignature))
.addEquivalenceGroup(new DataCacheKey(secondKey, firstSignature))
.addEquivalenceGroup(new DataCacheKey(secondKey, secondSignature))
.addRegressionTest(
new DataCacheKey(firstKey, firstSignature),
"801d7440d65a0e7c9ad0097d417f346dac4d4c4d5630724110fa3f3fe66236d9")
.test();
}
}
| DataCacheKeyTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockOutputByteBuffer.java | {
"start": 919,
"end": 1263
} | class ____ extends ITestS3ABlockOutputArray {
protected String getBlockOutputBufferName() {
return Constants.FAST_UPLOAD_BYTEBUFFER;
}
protected S3ADataBlocks.BlockFactory createFactory(S3AFileSystem fileSystem) {
return new S3ADataBlocks.ByteBufferBlockFactory(fileSystem.createStoreContext());
}
}
| ITestS3ABlockOutputByteBuffer |
java | qos-ch__slf4j | slf4j-nop/src/main/java/org/slf4j/nop/NOPServiceProvider.java | {
"start": 302,
"end": 1602
} | class ____ implements SLF4JServiceProvider {
/**
* Declare the version of the SLF4J API this implementation is compiled against.
* The value of this field is modified with each major release.
*/
// to avoid constant folding by the compiler, this field must *not* be final
public static String REQUESTED_API_VERSION = "2.0.99"; // !final
private final ILoggerFactory loggerFactory = new NOPLoggerFactory();
// LoggerFactory expects providers to initialize markerFactory as early as possible.
private final IMarkerFactory markerFactory;
// LoggerFactory expects providers to initialize their MDCAdapter field
// as early as possible, preferably at construction time.
private final MDCAdapter mdcAdapter;
public NOPServiceProvider() {
markerFactory = new BasicMarkerFactory();
mdcAdapter = new NOPMDCAdapter();
}
public ILoggerFactory getLoggerFactory() {
return loggerFactory;
}
public IMarkerFactory getMarkerFactory() {
return markerFactory;
}
public MDCAdapter getMDCAdapter() {
return mdcAdapter;
}
@Override
public String getRequestedApiVersion() {
return REQUESTED_API_VERSION;
}
public void initialize() {
}
}
| NOPServiceProvider |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/xml/StaxResult.java | {
"start": 1516,
"end": 1962
} | class ____ to use the {@code ContentHandler} obtained via {@link #getHandler()} to parse an
* input source using an {@code XMLReader}. Calling {@link #setHandler(org.xml.sax.ContentHandler)}
* or {@link #setLexicalHandler(org.xml.sax.ext.LexicalHandler)} will result in
* {@code UnsupportedOperationException}s.
*
* @author Arjen Poutsma
* @since 3.0
* @see XMLEventWriter
* @see XMLStreamWriter
* @see javax.xml.transform.Transformer
*/
| is |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JooqComponentBuilderFactory.java | {
"start": 1388,
"end": 1841
} | interface ____ {
/**
* JOOQ (camel-jooq)
* Store and retrieve Java objects from an SQL database using JOOQ.
*
* Category: database
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-jooq
*
* @return the dsl builder
*/
static JooqComponentBuilder jooq() {
return new JooqComponentBuilderImpl();
}
/**
* Builder for the JOOQ component.
*/
| JooqComponentBuilderFactory |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/endpoint/WebClientReactiveJwtBearerTokenResponseClient.java | {
"start": 1636,
"end": 1783
} | class ____
extends AbstractWebClientReactiveOAuth2AccessTokenResponseClient<JwtBearerGrantRequest> {
}
| WebClientReactiveJwtBearerTokenResponseClient |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/impl/ErrorThrowingDeserializer.java | {
"start": 564,
"end": 883
} | class ____ extends ValueDeserializer<Object>
{
private final Error _cause;
public ErrorThrowingDeserializer(NoClassDefFoundError cause) {
_cause = cause;
}
@Override
public Object deserialize(JsonParser jp, DeserializationContext ctxt) {
throw _cause;
}
}
| ErrorThrowingDeserializer |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/config/builder/CrankShaft.java | {
"start": 680,
"end": 977
} | class ____ {
final Optional<Double> rodLength;
CrankShaft(Optional<Double> rodLength) { this.rodLength = rodLength; }
public Optional<Double> getRodLength() {
return rodLength;
}
static Builder builder() {
return new Builder();
}
static final | CrankShaft |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java | {
"start": 6249,
"end": 8916
} | class ____ {
public static final Logger LOG = LoggerFactory.getLogger(CacheManager.class);
private static final float MIN_CACHED_BLOCKS_PERCENT = 0.001f;
// TODO: add pending / underCached / schedule cached blocks stats.
/**
* The FSNamesystem that contains this CacheManager.
*/
private final FSNamesystem namesystem;
/**
* The BlockManager associated with the FSN that owns this CacheManager.
*/
private final BlockManager blockManager;
/**
* Cache directives, sorted by ID.
*
* listCacheDirectives relies on the ordering of elements in this map
* to track what has already been listed by the client.
*/
private final TreeMap<Long, CacheDirective> directivesById = new TreeMap<>();
/**
* The directive ID to use for a new directive. IDs always increase, and are
* never reused.
*/
private long nextDirectiveId;
/**
* Cache directives
*/
private final Multimap<String, CacheDirective> directivesByPath =
HashMultimap.create();
/**
* Cache pools, sorted by name.
*/
private final TreeMap<String, CachePool> cachePools =
new TreeMap<String, CachePool>();
/**
* Maximum number of cache pools to list in one operation.
*/
private final int maxListCachePoolsResponses;
/**
* Maximum number of cache pool directives to list in one operation.
*/
private final int maxListCacheDirectivesNumResponses;
/**
* Interval between scans in milliseconds.
*/
private final long scanIntervalMs;
/**
* All cached blocks.
*/
private final GSet<CachedBlock, CachedBlock> cachedBlocks;
/**
* Lock which protects the CacheReplicationMonitor.
*/
private final ReentrantLock crmLock = new ReentrantLock();
private final SerializerCompat serializerCompat = new SerializerCompat();
/**
* Whether caching is enabled.
*
* If caching is disabled, we will not process cache reports or store
* information about what is cached where. We also do not start the
* CacheReplicationMonitor thread. This will save resources, but provide
* less functionality.
*
* Even when caching is disabled, we still store path-based cache
* information. This information is stored in the edit log and fsimage. We
* don't want to lose it just because a configuration setting was turned off.
* However, we will not act on this information if caching is disabled.
*/
private final boolean enabled;
/**
* The CacheReplicationMonitor.
*/
private CacheReplicationMonitor monitor;
private boolean isCheckLockTimeEnable;
private long maxLockTimeMs;
private long sleepTimeMs;
public static final | CacheManager |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java | {
"start": 26930,
"end": 33044
} | enum ____ {
NEITHER,
ISOLATED,
STOPPED
}
private final String datafeedId;
private final long startTime;
private final Long endTime;
/**
* This must always be set within a synchronized block that also checks
* the value of the {@code stoppedOrIsolatedBeforeRunning} flag.
*/
private DatafeedRunner datafeedRunner;
private StoppedOrIsolated stoppedOrIsolated = StoppedOrIsolated.NEITHER;
DatafeedTask(
long id,
String type,
String action,
TaskId parentTaskId,
StartDatafeedAction.DatafeedParams params,
Map<String, String> headers
) {
super(id, type, action, "datafeed-" + params.getDatafeedId(), parentTaskId, headers);
this.datafeedId = params.getDatafeedId();
this.startTime = params.getStartTime();
this.endTime = params.getEndTime();
}
public String getDatafeedId() {
return datafeedId;
}
public long getDatafeedStartTime() {
return startTime;
}
@Nullable
public Long getEndTime() {
return endTime;
}
public boolean isLookbackOnly() {
return endTime != null;
}
/**
* Set the datafeed runner <em>if</em> the task has not already been told to stop or isolate.
* @return A {@link StoppedOrIsolated} object that indicates whether the
* datafeed task had previously been told to stop or isolate. {@code datafeedRunner}
* will only be set to the supplied value if the return value of this method is
* {@link StoppedOrIsolated#NEITHER}.
*/
StoppedOrIsolated setDatafeedRunner(DatafeedRunner datafeedRunner) {
return executeIfNotStoppedOrIsolated(() -> this.datafeedRunner = Objects.requireNonNull(datafeedRunner));
}
/**
* Run a command <em>if</em> the task has not already been told to stop or isolate.
* @param runnable The command to run.
* @return A {@link StoppedOrIsolated} object that indicates whether the datafeed task
* had previously been told to stop or isolate. {@code runnable} will only be
* run if the return value of this method is {@link StoppedOrIsolated#NEITHER}.
*/
public synchronized StoppedOrIsolated executeIfNotStoppedOrIsolated(Runnable runnable) {
if (stoppedOrIsolated == StoppedOrIsolated.NEITHER) {
runnable.run();
}
return stoppedOrIsolated;
}
@Override
protected void onCancelled() {
// If the persistent task framework wants us to stop then we should do so immediately and
// we should wait for an existing datafeed import to realize we want it to stop.
// Note that this only applied when task cancel is invoked and stop datafeed api doesn't use this.
// Also stop datafeed api will obey the timeout.
stop(getReasonCancelled(), TimeValue.ZERO);
}
@Override
public boolean shouldCancelChildrenOnCancellation() {
// onCancelled implements graceful shutdown of children
return false;
}
public void stop(String reason, TimeValue timeout) {
synchronized (this) {
stoppedOrIsolated = StoppedOrIsolated.STOPPED;
if (datafeedRunner == null) {
return;
}
}
datafeedRunner.stopDatafeed(this, reason, timeout);
}
public synchronized StoppedOrIsolated getStoppedOrIsolated() {
return stoppedOrIsolated;
}
public void isolate() {
synchronized (this) {
// Stopped takes precedence over isolated for what we report externally,
// as stopped needs to cause the persistent task to be marked as completed
// (regardless of whether it was isolated) whereas isolated but not stopped
// mustn't do this.
if (stoppedOrIsolated == StoppedOrIsolated.NEITHER) {
stoppedOrIsolated = StoppedOrIsolated.ISOLATED;
}
if (datafeedRunner == null) {
return;
}
}
datafeedRunner.isolateDatafeed(this);
}
void completeOrFailIfRequired(Exception error) {
// A task can only be completed or failed once - trying multiple times just causes log spam
if (isCompleted()) {
return;
}
if (error != null) {
markAsFailed(error);
} else {
markAsCompleted();
}
}
public GetDatafeedRunningStateAction.Response.RunningState getRunningState() {
synchronized (this) {
if (datafeedRunner == null) {
// In this case we don't know for sure if lookback has completed. It may be that the
// datafeed has just moved nodes, but with so little delay that there's no lookback to
// do on the new node. However, there _might_ be some catching up required, so it's
// reasonable to say real-time running hasn't started yet. The state will quickly
// change once the datafeed runner gets going and determines where the datafeed is up
// to.
return new GetDatafeedRunningStateAction.Response.RunningState(endTime == null, false, null);
}
}
return new GetDatafeedRunningStateAction.Response.RunningState(
endTime == null,
datafeedRunner.finishedLookBack(this),
datafeedRunner.getSearchInterval(this)
);
}
}
/**
* Important: the methods of this | StoppedOrIsolated |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeSerializerTest.java | {
"start": 12340,
"end": 13070
} | class ____ {
private long bookId;
private String title;
private long authorId;
public Book() {}
public Book(long bookId, String title, long authorId) {
this.bookId = bookId;
this.title = title;
this.authorId = authorId;
}
@Override
public boolean equals(Object obj) {
if (obj.getClass() == Book.class) {
Book other = (Book) obj;
return other.bookId == this.bookId
&& other.authorId == this.authorId
&& this.title.equals(other.title);
} else {
return false;
}
}
}
public static | Book |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/dev/HibernateOrmDevControllerFailingDDLGenerationTestCase.java | {
"start": 359,
"end": 1359
} | class ____ {
@RegisterExtension
final static QuarkusDevModeTest TEST = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyEntityWithFailingDDLGeneration.class,
TypeWithUnsupportedSqlCode.class,
H2CustomDialect.class,
HibernateOrmDevInfoServiceTestResource.class)
.addAsResource("application-generation-none-customh2.properties", "application.properties")
.addAsResource("import-custom-table-name.sql", "import.sql"));
@Test
public void infoAvailableButWithException() {
RestAssured.given()
.param("expectedCreateDDLContent", "EXCEPTION")
.param("expectedDropDDLContent", "EXCEPTION")
.when().get("/dev-info/check-pu-info-with-failing-ddl-generation")
.then().body(is("OK"));
}
}
| HibernateOrmDevControllerFailingDDLGenerationTestCase |
java | apache__spark | sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDFWildcardList.java | {
"start": 1017,
"end": 1144
} | class ____ extends UDF {
public List<?> evaluate(Object o) {
return Collections.singletonList("data1");
}
}
| UDFWildcardList |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/filesystem/FsCheckpointStorageLocation.java | {
"start": 1480,
"end": 5516
} | class ____ extends FsCheckpointStreamFactory
implements CheckpointStorageLocation {
private final FileSystem fileSystem;
private final Path checkpointDirectory;
private final Path sharedStateDirectory;
private final Path taskOwnedStateDirectory;
private final Path metadataFilePath;
private final CheckpointStorageLocationReference reference;
private final int fileStateSizeThreshold;
private final int writeBufferSize;
public FsCheckpointStorageLocation(
FileSystem fileSystem,
Path checkpointDir,
Path sharedStateDir,
Path taskOwnedStateDir,
CheckpointStorageLocationReference reference,
int fileStateSizeThreshold,
int writeBufferSize) {
super(fileSystem, checkpointDir, sharedStateDir, fileStateSizeThreshold, writeBufferSize);
checkArgument(fileStateSizeThreshold >= 0);
checkArgument(writeBufferSize >= 0);
this.fileSystem = checkNotNull(fileSystem);
this.checkpointDirectory = checkNotNull(checkpointDir);
this.sharedStateDirectory = checkNotNull(sharedStateDir);
this.taskOwnedStateDirectory = checkNotNull(taskOwnedStateDir);
this.reference = checkNotNull(reference);
// the metadata file should not have entropy in its path
Path metadataDir = EntropyInjector.removeEntropyMarkerIfPresent(fileSystem, checkpointDir);
this.metadataFilePath =
new Path(metadataDir, AbstractFsCheckpointStorageAccess.METADATA_FILE_NAME);
this.fileStateSizeThreshold = fileStateSizeThreshold;
this.writeBufferSize = writeBufferSize;
}
// ------------------------------------------------------------------------
// Properties
// ------------------------------------------------------------------------
public Path getCheckpointDirectory() {
return checkpointDirectory;
}
public Path getSharedStateDirectory() {
return sharedStateDirectory;
}
public Path getTaskOwnedStateDirectory() {
return taskOwnedStateDirectory;
}
public Path getMetadataFilePath() {
return metadataFilePath;
}
// ------------------------------------------------------------------------
// checkpoint metadata
// ------------------------------------------------------------------------
@Override
public CheckpointMetadataOutputStream createMetadataOutputStream() throws IOException {
return new FsCheckpointMetadataOutputStream(
fileSystem, metadataFilePath, checkpointDirectory);
}
@Override
public void disposeOnFailure() throws IOException {
// on a failure, no chunk in the checkpoint directory needs to be saved, so
// we can drop it as a whole
fileSystem.delete(checkpointDirectory, true);
}
@Override
public CheckpointStorageLocationReference getLocationReference() {
return reference;
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
@Override
public String toString() {
return "FsCheckpointStorageLocation {"
+ "fileSystem="
+ fileSystem
+ ", checkpointDirectory="
+ checkpointDirectory
+ ", sharedStateDirectory="
+ sharedStateDirectory
+ ", taskOwnedStateDirectory="
+ taskOwnedStateDirectory
+ ", metadataFilePath="
+ metadataFilePath
+ ", reference="
+ reference
+ ", fileStateSizeThreshold="
+ fileStateSizeThreshold
+ ", writeBufferSize="
+ writeBufferSize
+ '}';
}
@VisibleForTesting
FileSystem getFileSystem() {
return fileSystem;
}
}
| FsCheckpointStorageLocation |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/windowing/sessionwindows/SessionEventGeneratorImpl.java | {
"start": 11951,
"end": 12807
} | class ____ extends AbstractEventGenerator {
@Override
public E generateEvent(long globalWatermark) {
return createEventFromTimestamp(
generateArbitraryInSessionTimestamp(), globalWatermark, Timing.AFTER_LATENESS);
}
@Override
public long getLocalWatermark() {
return getAfterLatenessTimestamp();
}
@Override
public boolean canGenerateEventAtWatermark(long globalWatermark) {
return isAfterLateness(globalWatermark);
}
@Override
public boolean hasMoreEvents() {
return true;
}
@Override
public EventGenerator<K, E> getNextGenerator(long globalWatermark) {
throw new IllegalStateException("This generator has no successor");
}
}
}
| AfterLatenessGenerator |
java | spring-projects__spring-boot | module/spring-boot-graphql-test/src/test/java/org/springframework/boot/graphql/test/autoconfigure/GraphQlTypeExcludeFilterTests.java | {
"start": 7677,
"end": 7880
} | class ____ implements WebGraphQlInterceptor {
@Override
public Mono<WebGraphQlResponse> intercept(WebGraphQlRequest request, Chain chain) {
return Mono.empty();
}
}
static | ExampleWebInterceptor |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/BlockingOperationRecorder.java | {
"start": 119,
"end": 320
} | class ____ {
public void control(List<IOThreadDetector> detectors) {
BlockingOperationControl.setIoThreadDetector(detectors.toArray(new IOThreadDetector[0]));
}
}
| BlockingOperationRecorder |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/conversion/bignumbers/BigDecimalTarget.java | {
"start": 239,
"end": 2253
} | class ____ {
private byte b;
private Byte bb;
private short s;
private Short ss;
private int i;
private Integer ii;
private long l;
private Long ll;
private float f;
private Float ff;
private double d;
private Double dd;
private String string;
private BigInteger bigInteger;
public byte getB() {
return b;
}
public void setB(byte b) {
this.b = b;
}
public Byte getBb() {
return bb;
}
public void setBb(Byte bb) {
this.bb = bb;
}
public short getS() {
return s;
}
public void setS(short s) {
this.s = s;
}
public Short getSs() {
return ss;
}
public void setSs(Short ss) {
this.ss = ss;
}
public int getI() {
return i;
}
public void setI(int i) {
this.i = i;
}
public Integer getIi() {
return ii;
}
public void setIi(Integer ii) {
this.ii = ii;
}
public long getL() {
return l;
}
public void setL(long l) {
this.l = l;
}
public Long getLl() {
return ll;
}
public void setLl(Long ll) {
this.ll = ll;
}
public float getF() {
return f;
}
public void setF(float f) {
this.f = f;
}
public Float getFf() {
return ff;
}
public void setFf(Float ff) {
this.ff = ff;
}
public double getD() {
return d;
}
public void setD(double d) {
this.d = d;
}
public Double getDd() {
return dd;
}
public void setDd(Double dd) {
this.dd = dd;
}
public String getString() {
return string;
}
public void setString(String string) {
this.string = string;
}
public BigInteger getBigInteger() {
return bigInteger;
}
public void setBigInteger(BigInteger bigInteger) {
this.bigInteger = bigInteger;
}
}
| BigDecimalTarget |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/context/annotation/DubboConfigConfigurationRegistrar.java | {
"start": 1455,
"end": 1766
} | class ____ implements ImportBeanDefinitionRegistrar {
@Override
public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata, BeanDefinitionRegistry registry) {
// initialize dubbo beans
DubboSpringInitializer.initialize(registry);
}
}
| DubboConfigConfigurationRegistrar |
java | alibaba__nacos | naming/src/test/java/com/alibaba/nacos/naming/utils/NamingRequestUtilTest.java | {
"start": 1265,
"end": 3307
} | class ____ {
@Mock
HttpServletRequest request;
@Mock
RequestMeta meta;
@BeforeEach
void setUp() {
RequestContextHolder.getContext().getBasicContext().getAddressContext().setRemoteIp("1.1.1.1");
RequestContextHolder.getContext().getBasicContext().getAddressContext().setSourceIp("2.2.2.2");
}
@AfterEach
void tearDown() {
RequestContextHolder.removeContext();
}
@Test
void testGetSourceIp() {
assertEquals("2.2.2.2", NamingRequestUtil.getSourceIp());
RequestContextHolder.getContext().getBasicContext().getAddressContext().setSourceIp(null);
assertEquals("1.1.1.1", NamingRequestUtil.getSourceIp());
RequestContextHolder.getContext().getBasicContext().getAddressContext().setRemoteIp(null);
assertNull(NamingRequestUtil.getSourceIp());
}
@Test
void getSourceIpForHttpRequest() {
when(request.getRemoteAddr()).thenReturn("3.3.3.3");
assertEquals("2.2.2.2", NamingRequestUtil.getSourceIpForHttpRequest(request));
RequestContextHolder.getContext().getBasicContext().getAddressContext().setSourceIp(null);
assertEquals("1.1.1.1", NamingRequestUtil.getSourceIpForHttpRequest(request));
RequestContextHolder.getContext().getBasicContext().getAddressContext().setRemoteIp(null);
assertEquals("3.3.3.3", NamingRequestUtil.getSourceIpForHttpRequest(request));
}
@Test
void getSourceIpForGrpcRequest() {
when(meta.getClientIp()).thenReturn("3.3.3.3");
assertEquals("2.2.2.2", NamingRequestUtil.getSourceIpForGrpcRequest(meta));
RequestContextHolder.getContext().getBasicContext().getAddressContext().setSourceIp(null);
assertEquals("1.1.1.1", NamingRequestUtil.getSourceIpForGrpcRequest(meta));
RequestContextHolder.getContext().getBasicContext().getAddressContext().setRemoteIp(null);
assertEquals("3.3.3.3", NamingRequestUtil.getSourceIpForGrpcRequest(meta));
}
} | NamingRequestUtilTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/CopyableValueComparatorTest.java | {
"start": 1092,
"end": 2031
} | class ____ extends ComparatorTestBase<StringValue> {
StringValue[] data =
new StringValue[] {
new StringValue(""),
new StringValue("Lorem Ipsum Dolor Omit Longer"),
new StringValue("aaaa"),
new StringValue("abcd"),
new StringValue("abce"),
new StringValue("abdd"),
new StringValue("accd"),
new StringValue("bbcd")
};
@Override
protected TypeComparator<StringValue> createComparator(boolean ascending) {
return new CopyableValueComparator<StringValue>(ascending, StringValue.class);
}
@Override
protected TypeSerializer<StringValue> createSerializer() {
return new CopyableValueSerializer<StringValue>(StringValue.class);
}
@Override
protected StringValue[] getSortedTestData() {
return data;
}
}
| CopyableValueComparatorTest |
java | apache__camel | components/camel-robotframework/src/test/java/org/apache/camel/component/robotframework/RobotFrameworkComponentTest.java | {
"start": 1190,
"end": 7223
} | class ____ extends CamelTestSupport {
@Test
public void testRobotFrameworkCamelBodyAsString() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
MockEndpoint mockString = getMockEndpoint("mock:resultString");
mock.expectedMinimumMessageCount(1);
mockString.expectedMinimumMessageCount(1);
template.sendBody("direct:setVariableCamelBody", "Hello Robot");
template.sendBody("direct:assertRobotCamelInputAsString", "Hello Robot");
MockEndpoint.assertIsSatisfied(context);
Exchange exchange = mock.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchange.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
Exchange exchangeString = mockString.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchangeString.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
}
@Test
public void testRobotFrameworkCamelBodyAsNumeric() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
MockEndpoint mockNumeric = getMockEndpoint("mock:resultNumeric");
mock.expectedMinimumMessageCount(1);
mockNumeric.expectedMinimumMessageCount(1);
template.sendBody("direct:setVariableCamelBody", 1);
template.sendBody("direct:assertRobotCamelInputAsNumeric", 1);
MockEndpoint.assertIsSatisfied(context);
Exchange exchange = mock.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchange.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
Exchange exchangeNumeric = mockNumeric.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchangeNumeric.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
}
@Test
public void testRobotFrameworkCamelBodyAndHeaderAsString() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:resultHeader");
mock.expectedMinimumMessageCount(1);
template.sendBodyAndHeader("direct:setVariableCamelBodyAndHeader", "Hello Robot", "stringKey", "headerValue");
MockEndpoint.assertIsSatisfied(context);
Exchange exchange = mock.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchange.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
}
@Test
public void testRobotFrameworkCamelBodyAndPropertyAsString() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:resultProperty");
mock.expectedMinimumMessageCount(1);
template.sendBodyAndProperty("direct:setVariableCamelBodyAndProperty", "Hello Robot", "stringKey", "propertyValue");
MockEndpoint.assertIsSatisfied(context);
Exchange exchange = mock.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchange.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
}
@Test
public void testRobotFrameworkResourceUriHeader() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:resultResourceUri");
mock.expectedMinimumMessageCount(1);
template.sendBody("direct:setVariableCamelBodyResourceUri", "Hello Robot");
MockEndpoint.assertIsSatisfied(context);
Exchange exchange = mock.getExchanges().get(0);
assertEquals(0, (int) ObjectHelper.cast(Integer.class,
exchange.getIn().getHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RETURN_CODE)));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
RobotFrameworkComponent rf = context.getComponent("robotframework", RobotFrameworkComponent.class);
rf.getConfiguration().setOutputDirectory("target");
from("direct:setVariableCamelBody").to(
"robotframework:src/test/resources/org/apache/camel/component/robotframework/set_variable_camel_body.robot?xunitFile=target/out.xml")
.to("mock:result");
from("direct:assertRobotCamelInputAsString")
.to("robotframework:src/test/resources/org/apache/camel/component/robotframework/assert_string_robot_with_camel_exchange_value_as_string.robot?xunitFile=target/out.xml")
.to("mock:resultString");
from("direct:assertRobotCamelInputAsNumeric")
.to("robotframework:src/test/resources/org/apache/camel/component/robotframework/assert_string_robot_with_camel_exchange_value_as_numeric.robot?xunitFile=target/out.xml")
.to("mock:resultNumeric");
from("direct:setVariableCamelBodyAndHeader").to(
"robotframework:src/test/resources/org/apache/camel/component/robotframework/set_variable_camel_header.robot?xunitFile=target/out.xml")
.to("mock:resultHeader");
from("direct:setVariableCamelBodyAndProperty").to(
"robotframework:src/test/resources/org/apache/camel/component/robotframework/set_variable_camel_property.robot?xunitFile=target/out.xml&allowContextMapAll=true")
.to("mock:resultProperty");
from("direct:setVariableCamelBodyResourceUri")
.setHeader(RobotFrameworkCamelConstants.CAMEL_ROBOT_RESOURCE_URI)
.constant("src/test/resources/org/apache/camel/component/robotframework/set_variable_camel_body.robot")
.to("robotframework:dummy?xunitFile=target/out.xml&allowTemplateFromHeader=true")
.to("mock:resultResourceUri");
}
};
}
}
| RobotFrameworkComponentTest |
java | spring-projects__spring-boot | module/spring-boot-pulsar/src/test/java/org/springframework/boot/pulsar/autoconfigure/PulsarPropertiesTests.java | {
"start": 19174,
"end": 20012
} | class ____ {
@Test
void bind() {
Map<String, String> map = new HashMap<>();
map.put("spring.pulsar.reader.name", "my-reader");
map.put("spring.pulsar.reader.topics", "my-topic");
map.put("spring.pulsar.reader.subscription-name", "my-subscription");
map.put("spring.pulsar.reader.subscription-role-prefix", "sub-role");
map.put("spring.pulsar.reader.read-compacted", "true");
PulsarProperties.Reader properties = bindProperties(map).getReader();
assertThat(properties.getName()).isEqualTo("my-reader");
assertThat(properties.getTopics()).containsExactly("my-topic");
assertThat(properties.getSubscriptionName()).isEqualTo("my-subscription");
assertThat(properties.getSubscriptionRolePrefix()).isEqualTo("sub-role");
assertThat(properties.isReadCompacted()).isTrue();
}
}
@Nested
| ReaderProperties |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-zookeeper-curator5/src/test/java/org/apache/dubbo/remoting/zookeeper/curator5/Curator5ZookeeperClientManagerTest.java | {
"start": 1837,
"end": 4301
} | class ____ {
private ZookeeperClient zookeeperClient;
private static URL zookeeperUrl;
private static MockedStatic<CuratorFrameworkFactory> curatorFrameworkFactoryMockedStatic;
private static CuratorFramework mockCuratorFramework;
@BeforeAll
public static void beforeAll() {
String zookeeperConnectionAddress1 = "zookeeper://127.0.0.1:2181";
zookeeperUrl = URL.valueOf(zookeeperConnectionAddress1 + "/service");
CuratorFrameworkFactory.Builder realBuilder = CuratorFrameworkFactory.builder();
CuratorFrameworkFactory.Builder spyBuilder = spy(realBuilder);
curatorFrameworkFactoryMockedStatic = mockStatic(CuratorFrameworkFactory.class);
curatorFrameworkFactoryMockedStatic
.when(CuratorFrameworkFactory::builder)
.thenReturn(spyBuilder);
mockCuratorFramework = mock(CuratorFramework.class);
doReturn(mockCuratorFramework).when(spyBuilder).build();
}
@BeforeEach
public void setUp() throws InterruptedException {
when(mockCuratorFramework.blockUntilConnected(anyInt(), any())).thenReturn(true);
when(mockCuratorFramework.getConnectionStateListenable()).thenReturn(StandardListenerManager.standard());
zookeeperClient = new ZookeeperClientManager().connect(zookeeperUrl);
}
@Test
void testZookeeperClient() {
assertThat(zookeeperClient, not(nullValue()));
zookeeperClient.close();
}
@Test
void testRegistryCheckConnectDefault() throws InterruptedException {
when(mockCuratorFramework.blockUntilConnected(anyInt(), any())).thenReturn(false);
ZookeeperClientManager zookeeperClientManager = new ZookeeperClientManager();
Assertions.assertThrowsExactly(IllegalStateException.class, () -> {
zookeeperClientManager.connect(zookeeperUrl);
});
}
@Test
void testRegistryNotCheckConnect() throws InterruptedException {
when(mockCuratorFramework.blockUntilConnected(anyInt(), any())).thenReturn(false);
URL url = zookeeperUrl.addParameter(CHECK_KEY, false);
ZookeeperClientManager zookeeperClientManager = new ZookeeperClientManager();
Assertions.assertDoesNotThrow(() -> {
zookeeperClientManager.connect(url);
});
}
@AfterAll
public static void afterAll() {
curatorFrameworkFactoryMockedStatic.close();
}
}
| Curator5ZookeeperClientManagerTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetPriorityActionTests.java | {
"start": 650,
"end": 4577
} | class ____ extends AbstractActionTestCase<SetPriorityAction> {
private final int priority = randomIntBetween(0, Integer.MAX_VALUE);
static SetPriorityAction randomInstance() {
return new SetPriorityAction(randomIntBetween(2, Integer.MAX_VALUE - 1));
}
@Override
protected SetPriorityAction doParseInstance(XContentParser parser) {
return SetPriorityAction.parse(parser);
}
@Override
protected SetPriorityAction createTestInstance() {
return new SetPriorityAction(priority);
}
@Override
protected SetPriorityAction mutateInstance(SetPriorityAction instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<SetPriorityAction> instanceReader() {
return SetPriorityAction::new;
}
public void testNonPositivePriority() {
Exception e = expectThrows(Exception.class, () -> new SetPriorityAction(randomIntBetween(-100, -1)));
assertThat(e.getMessage(), equalTo("[priority] must be 0 or greater"));
}
public void testNullPriorityAllowed() {
SetPriorityAction nullPriority = new SetPriorityAction((Integer) null);
assertNull(nullPriority.recoveryPriority);
}
public void testToSteps() {
SetPriorityAction action = createTestInstance();
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(1, steps.size());
StepKey expectedFirstStepKey = new StepKey(phase, SetPriorityAction.NAME, SetPriorityAction.NAME);
UpdateSettingsStep firstStep = (UpdateSettingsStep) steps.get(0);
assertThat(firstStep.getKey(), equalTo(expectedFirstStepKey));
assertThat(firstStep.getNextStepKey(), equalTo(nextStepKey));
assertThat(firstStep.getSettingsSupplier().apply(null).size(), equalTo(1));
assertEquals(priority, (long) IndexMetadata.INDEX_PRIORITY_SETTING.get(firstStep.getSettingsSupplier().apply(null)));
}
public void testNullPriorityStep() {
SetPriorityAction action = new SetPriorityAction((Integer) null);
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(1, steps.size());
StepKey expectedFirstStepKey = new StepKey(phase, SetPriorityAction.NAME, SetPriorityAction.NAME);
UpdateSettingsStep firstStep = (UpdateSettingsStep) steps.get(0);
assertThat(firstStep.getKey(), equalTo(expectedFirstStepKey));
assertThat(firstStep.getNextStepKey(), equalTo(nextStepKey));
assertThat(firstStep.getSettingsSupplier().apply(null).size(), equalTo(1));
assertThat(
IndexMetadata.INDEX_PRIORITY_SETTING.get(firstStep.getSettingsSupplier().apply(null)),
equalTo(IndexMetadata.INDEX_PRIORITY_SETTING.getDefault(firstStep.getSettingsSupplier().apply(null)))
);
}
public void testEqualsAndHashCode() {
EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copy, this::notCopy);
}
SetPriorityAction copy(SetPriorityAction setPriorityAction) {
return new SetPriorityAction(setPriorityAction.recoveryPriority);
}
SetPriorityAction notCopy(SetPriorityAction setPriorityAction) {
return new SetPriorityAction(setPriorityAction.recoveryPriority + 1);
}
}
| SetPriorityActionTests |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java | {
"start": 453,
"end": 40948
} | class ____ implements SqlBaseListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSingleStatement(SqlBaseParser.SingleStatementContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSingleStatement(SqlBaseParser.SingleStatementContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSingleExpression(SqlBaseParser.SingleExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSingleExpression(SqlBaseParser.SingleExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterStatementDefault(SqlBaseParser.StatementDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitStatementDefault(SqlBaseParser.StatementDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterExplain(SqlBaseParser.ExplainContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitExplain(SqlBaseParser.ExplainContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterDebug(SqlBaseParser.DebugContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitDebug(SqlBaseParser.DebugContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterShowTables(SqlBaseParser.ShowTablesContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitShowTables(SqlBaseParser.ShowTablesContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterShowColumns(SqlBaseParser.ShowColumnsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitShowColumns(SqlBaseParser.ShowColumnsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterShowFunctions(SqlBaseParser.ShowFunctionsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitShowFunctions(SqlBaseParser.ShowFunctionsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterShowSchemas(SqlBaseParser.ShowSchemasContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitShowSchemas(SqlBaseParser.ShowSchemasContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterShowCatalogs(SqlBaseParser.ShowCatalogsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitShowCatalogs(SqlBaseParser.ShowCatalogsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSysTables(SqlBaseParser.SysTablesContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSysTables(SqlBaseParser.SysTablesContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSysColumns(SqlBaseParser.SysColumnsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSysColumns(SqlBaseParser.SysColumnsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSysTypes(SqlBaseParser.SysTypesContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSysTypes(SqlBaseParser.SysTypesContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterQuery(SqlBaseParser.QueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitQuery(SqlBaseParser.QueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterLimitClause(SqlBaseParser.LimitClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitLimitClause(SqlBaseParser.LimitClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterQueryPrimaryDefault(SqlBaseParser.QueryPrimaryDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitQueryPrimaryDefault(SqlBaseParser.QueryPrimaryDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSubquery(SqlBaseParser.SubqueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSubquery(SqlBaseParser.SubqueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterOrderBy(SqlBaseParser.OrderByContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitOrderBy(SqlBaseParser.OrderByContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterQuerySpecification(SqlBaseParser.QuerySpecificationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitQuerySpecification(SqlBaseParser.QuerySpecificationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterFromClause(SqlBaseParser.FromClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitFromClause(SqlBaseParser.FromClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterGroupBy(SqlBaseParser.GroupByContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitGroupBy(SqlBaseParser.GroupByContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSingleGroupingSet(SqlBaseParser.SingleGroupingSetContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSingleGroupingSet(SqlBaseParser.SingleGroupingSetContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterGroupingExpressions(SqlBaseParser.GroupingExpressionsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitGroupingExpressions(SqlBaseParser.GroupingExpressionsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterNamedQuery(SqlBaseParser.NamedQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitNamedQuery(SqlBaseParser.NamedQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterTopClause(SqlBaseParser.TopClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitTopClause(SqlBaseParser.TopClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSetQuantifier(SqlBaseParser.SetQuantifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSelectItems(SqlBaseParser.SelectItemsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSelectItems(SqlBaseParser.SelectItemsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSelectExpression(SqlBaseParser.SelectExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSelectExpression(SqlBaseParser.SelectExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterRelation(SqlBaseParser.RelationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitRelation(SqlBaseParser.RelationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterJoinRelation(SqlBaseParser.JoinRelationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitJoinRelation(SqlBaseParser.JoinRelationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterJoinType(SqlBaseParser.JoinTypeContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitJoinType(SqlBaseParser.JoinTypeContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterJoinCriteria(SqlBaseParser.JoinCriteriaContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitJoinCriteria(SqlBaseParser.JoinCriteriaContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterTableName(SqlBaseParser.TableNameContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitTableName(SqlBaseParser.TableNameContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterAliasedQuery(SqlBaseParser.AliasedQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitAliasedQuery(SqlBaseParser.AliasedQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterAliasedRelation(SqlBaseParser.AliasedRelationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPivotClause(SqlBaseParser.PivotClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPivotClause(SqlBaseParser.PivotClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPivotArgs(SqlBaseParser.PivotArgsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPivotArgs(SqlBaseParser.PivotArgsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterExpression(SqlBaseParser.ExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitExpression(SqlBaseParser.ExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterLogicalNot(SqlBaseParser.LogicalNotContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitLogicalNot(SqlBaseParser.LogicalNotContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterStringQuery(SqlBaseParser.StringQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitStringQuery(SqlBaseParser.StringQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterBooleanDefault(SqlBaseParser.BooleanDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitBooleanDefault(SqlBaseParser.BooleanDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterExists(SqlBaseParser.ExistsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitExists(SqlBaseParser.ExistsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterMultiMatchQuery(SqlBaseParser.MultiMatchQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitMultiMatchQuery(SqlBaseParser.MultiMatchQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterMatchQuery(SqlBaseParser.MatchQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitMatchQuery(SqlBaseParser.MatchQueryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterLogicalBinary(SqlBaseParser.LogicalBinaryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitLogicalBinary(SqlBaseParser.LogicalBinaryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterMatchQueryOptions(SqlBaseParser.MatchQueryOptionsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitMatchQueryOptions(SqlBaseParser.MatchQueryOptionsContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPredicated(SqlBaseParser.PredicatedContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPredicated(SqlBaseParser.PredicatedContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPredicate(SqlBaseParser.PredicateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPredicate(SqlBaseParser.PredicateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterLikePattern(SqlBaseParser.LikePatternContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitLikePattern(SqlBaseParser.LikePatternContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPattern(SqlBaseParser.PatternContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPattern(SqlBaseParser.PatternContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterValueExpressionDefault(SqlBaseParser.ValueExpressionDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitValueExpressionDefault(SqlBaseParser.ValueExpressionDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterComparison(SqlBaseParser.ComparisonContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitComparison(SqlBaseParser.ComparisonContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterDereference(SqlBaseParser.DereferenceContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitDereference(SqlBaseParser.DereferenceContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterCast(SqlBaseParser.CastContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitCast(SqlBaseParser.CastContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterExtract(SqlBaseParser.ExtractContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitExtract(SqlBaseParser.ExtractContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterStar(SqlBaseParser.StarContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitStar(SqlBaseParser.StarContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterFunction(SqlBaseParser.FunctionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitFunction(SqlBaseParser.FunctionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterCase(SqlBaseParser.CaseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitCase(SqlBaseParser.CaseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitBuiltinDateTimeFunction(SqlBaseParser.BuiltinDateTimeFunctionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterCastExpression(SqlBaseParser.CastExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitCastExpression(SqlBaseParser.CastExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitConvertTemplate(SqlBaseParser.ConvertTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterFunctionName(SqlBaseParser.FunctionNameContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitFunctionName(SqlBaseParser.FunctionNameContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterNullLiteral(SqlBaseParser.NullLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitNullLiteral(SqlBaseParser.NullLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterIntervalLiteral(SqlBaseParser.IntervalLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitIntervalLiteral(SqlBaseParser.IntervalLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterNumericLiteral(SqlBaseParser.NumericLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitNumericLiteral(SqlBaseParser.NumericLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterBooleanLiteral(SqlBaseParser.BooleanLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitBooleanLiteral(SqlBaseParser.BooleanLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterStringLiteral(SqlBaseParser.StringLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitStringLiteral(SqlBaseParser.StringLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterParamLiteral(SqlBaseParser.ParamLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterComparisonOperator(SqlBaseParser.ComparisonOperatorContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitComparisonOperator(SqlBaseParser.ComparisonOperatorContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterBooleanValue(SqlBaseParser.BooleanValueContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitBooleanValue(SqlBaseParser.BooleanValueContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterInterval(SqlBaseParser.IntervalContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitInterval(SqlBaseParser.IntervalContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterIntervalField(SqlBaseParser.IntervalFieldContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitIntervalField(SqlBaseParser.IntervalFieldContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterPrimitiveDataType(SqlBaseParser.PrimitiveDataTypeContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitPrimitiveDataType(SqlBaseParser.PrimitiveDataTypeContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterQualifiedName(SqlBaseParser.QualifiedNameContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitQualifiedName(SqlBaseParser.QualifiedNameContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterIdentifier(SqlBaseParser.IdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitIdentifier(SqlBaseParser.IdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterTableIdentifier(SqlBaseParser.TableIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitTableIdentifier(SqlBaseParser.TableIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitQuotedIdentifier(SqlBaseParser.QuotedIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterBackQuotedIdentifier(SqlBaseParser.BackQuotedIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitBackQuotedIdentifier(SqlBaseParser.BackQuotedIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterUnquotedIdentifier(SqlBaseParser.UnquotedIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitUnquotedIdentifier(SqlBaseParser.UnquotedIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterDigitIdentifier(SqlBaseParser.DigitIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitDigitIdentifier(SqlBaseParser.DigitIdentifierContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterDecimalLiteral(SqlBaseParser.DecimalLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitDecimalLiteral(SqlBaseParser.DecimalLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterIntegerLiteral(SqlBaseParser.IntegerLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitIntegerLiteral(SqlBaseParser.IntegerLiteralContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterString(SqlBaseParser.StringContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitString(SqlBaseParser.StringContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterWhenClause(SqlBaseParser.WhenClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitWhenClause(SqlBaseParser.WhenClauseContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterNonReserved(SqlBaseParser.NonReservedContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitNonReserved(SqlBaseParser.NonReservedContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void enterEveryRule(ParserRuleContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void exitEveryRule(ParserRuleContext ctx) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void visitTerminal(TerminalNode node) {}
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override
public void visitErrorNode(ErrorNode node) {}
}
| SqlBaseBaseListener |
java | apache__camel | components/camel-protobuf/src/main/java/org/apache/camel/dataformat/protobuf/ProtobufDataFormat.java | {
"start": 2383,
"end": 8898
} | class ____ extends ServiceSupport
implements DataFormat, DataFormatName, DataFormatContentTypeHeader, CamelContextAware {
public static final String CONTENT_TYPE_FORMAT_NATIVE = "native";
public static final String CONTENT_TYPE_FORMAT_JSON = "json";
private static final String CONTENT_TYPE_HEADER_NATIVE = "application/octet-stream";
private static final String CONTENT_TYPE_HEADER_JSON = "application/json";
private CamelContext camelContext;
private Message defaultInstance;
private String instanceClass;
private boolean contentTypeHeader = true;
private String contentTypeFormat = CONTENT_TYPE_FORMAT_NATIVE;
public ProtobufDataFormat() {
}
public ProtobufDataFormat(Message defaultInstance) {
this.defaultInstance = defaultInstance;
}
public ProtobufDataFormat(Message defaultInstance, String contentTypeFormat) {
this.defaultInstance = defaultInstance;
this.contentTypeFormat = contentTypeFormat;
}
@Override
public String getDataFormatName() {
return "protobuf";
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
public void setDefaultInstance(Message instance) {
this.defaultInstance = instance;
}
public void setDefaultInstance(Object instance) {
if (instance instanceof Message) {
this.defaultInstance = (Message) instance;
} else {
throw new IllegalArgumentException(
"The argument for setDefaultInstance should be subClass of com.google.protobuf.Message");
}
}
public void setInstanceClass(String className) {
ObjectHelper.notNull(className, "ProtobufDataFormat instanceClass");
instanceClass = className;
}
public String getInstanceClass() {
return instanceClass;
}
public void setContentTypeHeader(boolean contentTypeHeader) {
this.contentTypeHeader = contentTypeHeader;
}
public boolean isContentTypeHeader() {
return contentTypeHeader;
}
public String getContentTypeFormat() {
return contentTypeFormat;
}
/*
* Defines a content type format in which protobuf message will be
* serialized/deserialized from(to) the Java been. It can be native protobuf
* format or json fields representation. The default value is 'native'.
*/
public void setContentTypeFormat(String contentTypeFormat) {
StringHelper.notEmpty(contentTypeFormat, "ProtobufDataFormat contentTypeFormat");
this.contentTypeFormat = contentTypeFormat;
}
/*
* (non-Javadoc)
* @see org.apache.camel.spi.DataFormat#marshal(org.apache.camel.Exchange,
* java.lang.Object, java.io.OutputStream)
*/
@Override
public void marshal(final Exchange exchange, final Object graph, final OutputStream outputStream) throws Exception {
final Message inputMessage = convertGraphToMessage(exchange, graph);
String contentTypeHeader = CONTENT_TYPE_HEADER_NATIVE;
if (contentTypeFormat.equals(CONTENT_TYPE_FORMAT_JSON)) {
IOUtils.write(JsonFormat.printer().print(inputMessage), outputStream, StandardCharsets.UTF_8);
contentTypeHeader = CONTENT_TYPE_HEADER_JSON;
} else if (contentTypeFormat.equals(CONTENT_TYPE_FORMAT_NATIVE)) {
inputMessage.writeTo(outputStream);
} else {
throw new CamelException("Invalid protobuf content type format: " + contentTypeFormat);
}
if (isContentTypeHeader()) {
exchange.getMessage().setHeader(Exchange.CONTENT_TYPE, contentTypeHeader);
}
}
private Message convertGraphToMessage(final Exchange exchange, final Object inputData)
throws NoTypeConversionAvailableException {
if (!(inputData instanceof Message)) {
// we just need to make sure input data is not a proto type
final Map<?, ?> messageInMap
= exchange.getContext().getTypeConverter().tryConvertTo(Map.class, exchange, inputData);
if (messageInMap != null) {
return ProtobufConverter.toProto(messageInMap, defaultInstance);
}
}
return exchange.getContext().getTypeConverter().mandatoryConvertTo(Message.class, exchange, inputData);
}
/*
* (non-Javadoc)
* @see org.apache.camel.spi.DataFormat#unmarshal(org.apache.camel.Exchange,
* java.io.InputStream)
*/
@Override
public Object unmarshal(final Exchange exchange, final InputStream inputStream) throws Exception {
ObjectHelper.notNull(defaultInstance, "defaultInstance or instanceClassName must be set", this);
Builder builder = defaultInstance.newBuilderForType();
if (contentTypeFormat.equals(CONTENT_TYPE_FORMAT_JSON)) {
JsonFormat.parser().ignoringUnknownFields().merge(new InputStreamReader(inputStream), builder);
} else if (contentTypeFormat.equals(CONTENT_TYPE_FORMAT_NATIVE)) {
builder = defaultInstance.newBuilderForType().mergeFrom(inputStream);
} else {
throw new CamelException("Invalid protobuf content type format: " + contentTypeFormat);
}
if (!builder.isInitialized()) {
// TODO which exception should be thrown here?
throw new InvalidPayloadException(exchange, defaultInstance.getClass());
}
return builder.build();
}
protected Message loadDefaultInstance(final String className, final CamelContext context)
throws CamelException, ClassNotFoundException {
Class<?> instanceClass = context.getClassResolver().resolveMandatoryClass(className);
if (Message.class.isAssignableFrom(instanceClass)) {
try {
Method method = instanceClass.getMethod("getDefaultInstance");
return (Message) method.invoke(null);
} catch (final Exception ex) {
throw new CamelException(
"Can't set the defaultInstance of ProtobufferDataFormat with " + className + ", caused by " + ex);
}
} else {
throw new CamelException(
"Can't set the defaultInstance of ProtobufferDataFormat with " + className
+ ", as the | ProtobufDataFormat |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/vectors/es93/DirectIOCapableLucene99FlatVectorsFormat.java | {
"start": 1957,
"end": 3925
} | class ____ extends DirectIOCapableFlatVectorsFormat {
static final String NAME = "Lucene99FlatVectorsFormat";
private final FlatVectorsScorer vectorsScorer;
/** Constructs a format */
public DirectIOCapableLucene99FlatVectorsFormat(FlatVectorsScorer vectorsScorer) {
super(NAME);
this.vectorsScorer = vectorsScorer;
}
@Override
public FlatVectorsScorer flatVectorsScorer() {
return vectorsScorer;
}
@Override
protected FlatVectorsReader createReader(SegmentReadState state) throws IOException {
return new Lucene99FlatVectorsReader(state, vectorsScorer);
}
@Override
public FlatVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException {
return new Lucene99FlatVectorsWriter(state, vectorsScorer);
}
@Override
public FlatVectorsReader fieldsReader(SegmentReadState state, boolean useDirectIO) throws IOException {
if (state.context.context() == IOContext.Context.DEFAULT && useDirectIO && canUseDirectIO(state)) {
// only override the context for the random-access use case
SegmentReadState directIOState = new SegmentReadState(
state.directory,
state.segmentInfo,
state.fieldInfos,
new DirectIOContext(state.context.hints()),
state.segmentSuffix
);
// Use mmap for merges and direct I/O for searches.
return new MergeReaderWrapper(
new Lucene99FlatBulkScoringVectorsReader(
directIOState,
new Lucene99FlatVectorsReader(directIOState, vectorsScorer),
vectorsScorer
),
new Lucene99FlatVectorsReader(state, vectorsScorer)
);
} else {
return new Lucene99FlatVectorsReader(state, vectorsScorer);
}
}
static | DirectIOCapableLucene99FlatVectorsFormat |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/conditional/expression/ConditionalWithSourceToTargetExpressionMapper.java | {
"start": 418,
"end": 1090
} | interface ____ {
ConditionalWithSourceToTargetExpressionMapper INSTANCE =
Mappers.getMapper( ConditionalWithSourceToTargetExpressionMapper.class );
@Mapping(source = "orderDTO", target = "customer",
conditionExpression = "java(Util.mapCustomerFromOrder( orderDTO ))")
Order convertToOrder(OrderDTO orderDTO);
@Mapping(source = "customerName", target = "name")
@Mapping(source = "orderDTO", target = "address",
conditionExpression = "java(Util.mapAddressFromOrder( orderDTO ))")
Customer convertToCustomer(OrderDTO orderDTO);
Address convertToAddress(OrderDTO orderDTO);
| ConditionalWithSourceToTargetExpressionMapper |
java | netty__netty | testsuite/src/main/java/io/netty/testsuite/transport/socket/DatagramMulticastIPv6Test.java | {
"start": 891,
"end": 1364
} | class ____ extends DatagramMulticastTest {
@Override
public void testMulticast(Bootstrap sb, Bootstrap cb) throws Throwable {
// Not works on windows atm.
// See https://github.com/netty/netty/issues/11285
assumeFalse(PlatformDependent.isWindows());
super.testMulticast(sb, cb);
}
@Override
protected SocketProtocolFamily socketProtocolFamily() {
return SocketProtocolFamily.INET6;
}
}
| DatagramMulticastIPv6Test |
java | apache__spark | sql/connect/client/jvm/src/test/java/org/apache/spark/sql/JavaEncoderSuite.java | {
"start": 1450,
"end": 3913
} | class ____ implements Serializable {
private static SparkSession spark;
@BeforeAll
public static void setup() {
Assumptions.assumeTrue(IntegrationTestUtils.isAssemblyJarsDirExists(),
"Skipping all tests because assembly jars directory does not exist.");
spark = SparkConnectServerUtils.createSparkSession();
}
@AfterAll
public static void tearDown() {
if (spark != null) {
spark.stop();
spark = null;
SparkConnectServerUtils.stop();
}
}
private static BigDecimal bigDec(long unscaled, int scale) {
return BigDecimal.valueOf(unscaled, scale);
}
private <T> Dataset<T> dataset(Encoder<T> encoder, T... elements) {
return spark.createDataset(Arrays.asList(elements), encoder);
}
@Test
public void testSimpleEncoders() {
final Column v = col("value");
assertFalse(
dataset(BOOLEAN(), false, true, false).select(every(v)).as(BOOLEAN()).head());
assertEquals(
7L,
dataset(BYTE(), (byte) -120, (byte)127).select(sum(v)).as(LONG()).head().longValue());
assertEquals(
(short) 16,
dataset(SHORT(), (short)16, (short)2334).select(min(v)).as(SHORT()).head().shortValue());
assertEquals(
10L,
dataset(INT(), 1, 2, 3, 4).select(sum(v)).as(LONG()).head().longValue());
assertEquals(
96L,
dataset(LONG(), 77L, 19L).select(sum(v)).as(LONG()).head().longValue());
assertEquals(
0.12f,
dataset(FLOAT(), 0.12f, 0.3f, 44f).select(min(v)).as(FLOAT()).head(),
0.0001f);
assertEquals(
789d,
dataset(DOUBLE(), 789d, 12.213d, 10.01d).select(max(v)).as(DOUBLE()).head(),
0.0001f);
assertEquals(
bigDec(1002, 2),
dataset(DECIMAL(), bigDec(1000, 2), bigDec(2, 2))
.select(sum(v)).as(DECIMAL()).head().setScale(2));
}
@Test
public void testRowEncoder() {
final StructType schema = new StructType()
.add("a", "int")
.add("b", "string");
final Dataset<Row> df = spark.range(3)
.map(new MapFunction<Long, Row>() {
@Override
public Row call(Long i) {
return create(i.intValue(), "s" + i);
}
},
Encoders.row(schema))
.filter(col("a").geq(1));
final List<Row> expected = Arrays.asList(create(1, "s1"), create(2, "s2"));
Assertions.assertEquals(expected, df.collectAsList());
}
}
| JavaEncoderSuite |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/InlineDelegateByteBuddyMockMaker.java | {
"start": 30728,
"end": 34868
} | class ____ what leads to infinite loops");
}
bytecodeGenerator.mockClassStatic(type);
Map<Class<?>, MockMethodInterceptor> interceptors = mockedStatics.get();
if (interceptors == null) {
interceptors = new WeakHashMap<>();
mockedStatics.set(interceptors);
}
mockedStatics.getBackingMap().expungeStaleEntries();
return new InlineStaticMockControl<>(type, interceptors, settings, handler);
}
@Override
public <T> ConstructionMockControl<T> createConstructionMock(
Class<T> type,
Function<MockedConstruction.Context, MockCreationSettings<T>> settingsFactory,
Function<MockedConstruction.Context, MockHandler<T>> handlerFactory,
MockedConstruction.MockInitializer<T> mockInitializer) {
if (type == Object.class) {
throw new MockitoException(
"It is not possible to mock construction of the Object class "
+ "to avoid inference with default object constructor chains");
} else if (type.isPrimitive() || Modifier.isAbstract(type.getModifiers())) {
throw new MockitoException(
"It is not possible to construct primitive types or abstract types: "
+ type.getName());
}
bytecodeGenerator.mockClassConstruction(type);
Map<Class<?>, BiConsumer<Object, MockedConstruction.Context>> interceptors =
mockedConstruction.get();
if (interceptors == null) {
interceptors = new WeakHashMap<>();
mockedConstruction.set(interceptors);
}
mockedConstruction.getBackingMap().expungeStaleEntries();
return new InlineConstructionMockControl<>(
type, settingsFactory, handlerFactory, mockInitializer, interceptors);
}
@Override
@SuppressWarnings("unchecked")
public <T> T newInstance(Class<T> cls) throws InstantiationException {
Constructor<?>[] constructors = cls.getDeclaredConstructors();
if (constructors.length == 0) {
throw new InstantiationException(cls.getName() + " does not define a constructor");
}
Constructor<?> selected = constructors[0];
for (Constructor<?> constructor : constructors) {
if (Modifier.isPublic(constructor.getModifiers())) {
selected = constructor;
break;
}
}
Class<?>[] types = selected.getParameterTypes();
Object[] arguments = new Object[types.length];
int index = 0;
for (Class<?> type : types) {
arguments[index++] = makeStandardArgument(type);
}
MemberAccessor accessor = Plugins.getMemberAccessor();
try {
return (T)
accessor.newInstance(
selected,
callback -> {
currentMocking.set(cls);
try {
return callback.newInstance();
} finally {
currentMocking.remove();
}
},
arguments);
} catch (Exception e) {
throw new InstantiationException("Could not instantiate " + cls.getName(), e);
}
}
private Object makeStandardArgument(Class<?> type) {
if (type == boolean.class) {
return false;
} else if (type == byte.class) {
return (byte) 0;
} else if (type == short.class) {
return (short) 0;
} else if (type == char.class) {
return (char) 0;
} else if (type == int.class) {
return 0;
} else if (type == long.class) {
return 0L;
} else if (type == float.class) {
return 0f;
} else if (type == double.class) {
return 0d;
} else {
return null;
}
}
private static | loading |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java | {
"start": 1495,
"end": 8022
} | class ____ {
private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
private static final File tmpDir = GenericTestUtils.getTestDir("creds");
/* The default JCEKS provider - for testing purposes */
private String jceksProvider;
private void assertOutputContains(String expected) {
Assertions.assertThat(outContent.toString())
.contains(expected);
}
@BeforeEach
public void setup() throws Exception {
System.setOut(new PrintStream(outContent));
System.setErr(new PrintStream(errContent));
final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
new File(jksPath.toString()).delete();
jceksProvider = "jceks://file" + jksPath.toUri();
}
@Test
public void testCredentialSuccessfulLifecycle() throws Exception {
outContent.reset();
String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
jceksProvider};
int rc = 0;
CredentialShell cs = new CredentialShell();
cs.setConf(new Configuration());
rc = cs.run(args1);
assertEquals(0, rc, outContent.toString());
assertTrue(outContent.toString().contains("credential1 has been successfully " +
"created."));
assertTrue(outContent.toString()
.contains(ProviderUtils.NO_PASSWORD_WARN));
assertTrue(outContent.toString()
.contains(ProviderUtils.NO_PASSWORD_INSTRUCTIONS_DOC));
assertTrue(outContent.toString()
.contains(ProviderUtils.NO_PASSWORD_CONT));
outContent.reset();
String[] args2 = {"list", "-provider",
jceksProvider};
rc = cs.run(args2);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("credential1"));
outContent.reset();
String[] args4 = {"delete", "credential1", "-f", "-provider",
jceksProvider};
rc = cs.run(args4);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("credential1 has been successfully " +
"deleted."));
outContent.reset();
String[] args5 = {"list", "-provider",
jceksProvider};
rc = cs.run(args5);
assertEquals(0, rc);
assertFalse(outContent.toString().contains("credential1"), outContent.toString());
}
@Test
public void testInvalidProvider() throws Exception {
String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
"sdff://file/tmp/credstore.jceks"};
int rc = 0;
CredentialShell cs = new CredentialShell();
cs.setConf(new Configuration());
rc = cs.run(args1);
assertEquals(1, rc);
assertTrue(outContent.toString().contains(
CredentialShell.NO_VALID_PROVIDERS));
}
@Test
public void testTransientProviderWarning() throws Exception {
String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
"user:///"};
int rc = 0;
CredentialShell cs = new CredentialShell();
cs.setConf(new Configuration());
rc = cs.run(args1);
assertEquals(0, rc, outContent.toString());
assertTrue(outContent.toString().contains("WARNING: you are modifying a " +
"transient provider."));
String[] args2 = {"delete", "credential1", "-f", "-provider", "user:///"};
rc = cs.run(args2);
assertEquals(0, rc, outContent.toString());
assertTrue(outContent.toString().contains("credential1 has been successfully " +
"deleted."));
}
@Test
public void testTransientProviderOnlyConfig() throws Exception {
String[] args1 = {"create", "credential1"};
int rc = 0;
CredentialShell cs = new CredentialShell();
Configuration config = new Configuration();
config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, "user:///");
cs.setConf(config);
rc = cs.run(args1);
assertEquals(1, rc);
assertTrue(outContent.toString().contains(
CredentialShell.NO_VALID_PROVIDERS));
}
@Test
public void testPromptForCredentialWithEmptyPasswd() throws Exception {
String[] args1 = {"create", "credential1", "-provider",
jceksProvider};
ArrayList<String> passwords = new ArrayList<String>();
passwords.add(null);
passwords.add("p@ssw0rd");
int rc = 0;
CredentialShell shell = new CredentialShell();
shell.setConf(new Configuration());
shell.setPasswordReader(new MockPasswordReader(passwords));
rc = shell.run(args1);
assertEquals(1, rc, outContent.toString());
assertTrue(outContent.toString().contains("Passwords don't match"));
}
@Test
public void testPromptForCredentialNotFound() throws Exception {
String[] args1 = {"check", "credential1", "-provider",
jceksProvider};
ArrayList<String> password = new ArrayList<String>();
password.add("p@ssw0rd");
int rc = 0;
CredentialShell shell = new CredentialShell();
shell.setConf(new Configuration());
shell.setPasswordReader(new MockPasswordReader(password));
rc = shell.run(args1);
assertEquals(0, rc);
assertOutputContains("Password match failed for credential1.");
}
@Test
public void testPromptForCredential() throws Exception {
String[] args1 = {"create", "credential1", "-provider",
jceksProvider};
ArrayList<String> passwords = new ArrayList<String>();
passwords.add("p@ssw0rd");
passwords.add("p@ssw0rd");
int rc = 0;
CredentialShell shell = new CredentialShell();
shell.setConf(new Configuration());
shell.setPasswordReader(new MockPasswordReader(passwords));
rc = shell.run(args1);
assertEquals(0, rc);
assertOutputContains("credential1 has been successfully created.");
String[] args2 = {"check", "credential1", "-provider",
jceksProvider};
ArrayList<String> password = new ArrayList<String>();
password.add("p@ssw0rd");
shell.setPasswordReader(new MockPasswordReader(password));
rc = shell.run(args2);
assertEquals(0, rc);
assertOutputContains("Password match success for credential1.");
ArrayList<String> passwordError = new ArrayList<String>();
passwordError.add("p@ssw0rderr");
shell.setPasswordReader(new MockPasswordReader(password));
rc = shell.run(args2);
assertEquals(0, rc);
assertOutputContains("Password match failed for credential1.");
String[] args3 = {"delete", "credential1", "-f", "-provider",
jceksProvider};
rc = shell.run(args3);
assertEquals(0, rc);
assertOutputContains("credential1 has been successfully deleted.");
}
public | TestCredShell |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/transformer/ByteArrayDataTypeTransformer.java | {
"start": 1240,
"end": 1629
} | class ____ extends Transformer {
private static final Transformer DELEGATE = new TypeConverterTransformer(byte[].class);
@Override
public void transform(Message message, DataType from, DataType to) throws Exception {
DELEGATE.transform(message, from, to);
message.setHeader(Exchange.CONTENT_TYPE, "application/octet-stream");
}
}
| ByteArrayDataTypeTransformer |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/UnknownProducerIdException.java | {
"start": 1278,
"end": 1439
} | class ____ extends OutOfOrderSequenceException {
public UnknownProducerIdException(String message) {
super(message);
}
}
| UnknownProducerIdException |
java | elastic__elasticsearch | modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java | {
"start": 2595,
"end": 11959
} | class ____ extends ESTestCase {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<RankEvalResponse, Void> PARSER = new ConstructingObjectParser<>(
"rank_eval_response",
true,
a -> new RankEvalResponse(
(Double) a[0],
((List<EvalQueryQuality>) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())),
((List<Tuple<String, Exception>>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2))
)
);
static {
PARSER.declareDouble(ConstructingObjectParser.constructorArg(), EvalQueryQuality.METRIC_SCORE_FIELD);
PARSER.declareNamedObjects(
ConstructingObjectParser.optionalConstructorArg(),
(p, c, n) -> EvalQueryQualityTests.parseInstance(p, n),
new ParseField("details")
);
PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.nextToken(), p);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p);
Tuple<String, ElasticsearchException> tuple = new Tuple<>(n, ElasticsearchException.failureFromXContent(p));
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p);
return tuple;
}, new ParseField("failures"));
}
private static final Exception[] RANDOM_EXCEPTIONS = new Exception[] {
new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)),
new CircuitBreakingException("Data too large", 123, 456, CircuitBreaker.Durability.PERMANENT),
new IllegalArgumentException("Closed resource", new RuntimeException("Resource")),
new SearchPhaseExecutionException(
"search",
"all shards failed",
new ShardSearchFailure[] {
new ShardSearchFailure(
new ParsingException(1, 2, "foobar", null),
new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)
) }
),
new ElasticsearchException(
"Parsing failed",
new ParsingException(9, 42, "Wrong state", new NullPointerException("Unexpected null value"))
) };
private static RankEvalResponse createRandomResponse() {
int numberOfRequests = randomIntBetween(0, 5);
Map<String, EvalQueryQuality> partials = Maps.newMapWithExpectedSize(numberOfRequests);
for (int i = 0; i < numberOfRequests; i++) {
String id = randomAlphaOfLengthBetween(3, 10);
EvalQueryQuality evalQuality = new EvalQueryQuality(id, randomDoubleBetween(0.0, 1.0, true));
int numberOfDocs = randomIntBetween(0, 5);
List<RatedSearchHit> ratedHits = new ArrayList<>(numberOfDocs);
for (int d = 0; d < numberOfDocs; d++) {
ratedHits.add(searchHit(randomAlphaOfLength(10), randomIntBetween(0, 1000), randomIntBetween(0, 10)));
}
evalQuality.addHitsAndRatings(ratedHits);
partials.put(id, evalQuality);
}
int numberOfErrors = randomIntBetween(0, 2);
Map<String, Exception> errors = Maps.newMapWithExpectedSize(numberOfRequests);
for (int i = 0; i < numberOfErrors; i++) {
errors.put(randomAlphaOfLengthBetween(3, 10), randomFrom(RANDOM_EXCEPTIONS));
}
return new RankEvalResponse(randomDouble(), partials, errors);
}
public void testSerialization() throws IOException {
RankEvalResponse randomResponse = createRandomResponse();
try (BytesStreamOutput output = new BytesStreamOutput()) {
randomResponse.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
RankEvalResponse deserializedResponse = new RankEvalResponse(in);
assertEquals(randomResponse.getMetricScore(), deserializedResponse.getMetricScore(), 0.0000000001);
assertEquals(randomResponse.getPartialResults(), deserializedResponse.getPartialResults());
assertEquals(randomResponse.getFailures().keySet(), deserializedResponse.getFailures().keySet());
assertNotSame(randomResponse, deserializedResponse);
assertEquals(-1, in.read());
}
}
}
public void testXContentParsing() throws IOException {
RankEvalResponse testItem = createRandomResponse();
boolean humanReadable = randomBoolean();
XContentType xContentType = randomFrom(XContentType.values());
BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
// skip inserting random fields for:
// - the `details` section, which can contain arbitrary queryIds
// - everything under `failures` (exceptions parsing is quiet lenient)
// - everything under `hits` (we test lenient SearchHit parsing elsewhere)
Predicate<String> pathsToExclude = path -> (path.endsWith("details") || path.contains("failures") || path.contains("hits"));
BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random());
RankEvalResponse parsedItem;
try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
parsedItem = PARSER.apply(parser, null);
assertNull(parser.nextToken());
}
assertNotSame(testItem, parsedItem);
// We cannot check equality of object here because some information (e.g.
// SearchHit#shard) cannot fully be parsed back.
assertEquals(testItem.getMetricScore(), parsedItem.getMetricScore(), 0.0);
assertEquals(testItem.getPartialResults().keySet(), parsedItem.getPartialResults().keySet());
for (EvalQueryQuality metricDetail : testItem.getPartialResults().values()) {
EvalQueryQuality parsedEvalQueryQuality = parsedItem.getPartialResults().get(metricDetail.getId());
assertToXContentEquivalent(
toXContent(metricDetail, xContentType, humanReadable),
toXContent(parsedEvalQueryQuality, xContentType, humanReadable),
xContentType
);
}
// Also exceptions that are parsed back will be different since they are re-wrapped during parsing.
// However, we can check that there is the expected number
assertEquals(testItem.getFailures().keySet(), parsedItem.getFailures().keySet());
for (String queryId : testItem.getFailures().keySet()) {
Exception ex = parsedItem.getFailures().get(queryId);
assertThat(ex, instanceOf(ElasticsearchException.class));
}
}
public void testToXContent() throws IOException {
EvalQueryQuality coffeeQueryQuality = new EvalQueryQuality("coffee_query", 0.1);
coffeeQueryQuality.addHitsAndRatings(Arrays.asList(searchHit("index", 123, 5), searchHit("index", 456, null)));
RankEvalResponse response = new RankEvalResponse(
0.123,
Collections.singletonMap("coffee_query", coffeeQueryQuality),
Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg"))
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString();
assertEquals(XContentHelper.stripWhitespace("""
{
"metric_score": 0.123,
"details": {
"coffee_query": {
"metric_score": 0.1,
"unrated_docs": [ { "_index": "index", "_id": "456" } ],
"hits": [
{
"hit": {
"_index": "index",
"_id": "123",
"_score": 1.0
},
"rating": 5
},
{
"hit": {
"_index": "index",
"_id": "456",
"_score": 1.0
},
"rating": null
}
]
}
},
"failures": {
"beer_query": {
"error": {
"root_cause": [ { "type": "parsing_exception", "reason": "someMsg", "line": 0, "col": 0 } ],
"type": "parsing_exception",
"reason": "someMsg",
"line": 0,
"col": 0
}
}
}
}"""), xContent);
}
private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
SearchHit hit = SearchHit.unpooled(docId, docId + "");
hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
hit.score(1.0f);
return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
}
}
| RankEvalResponseTests |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java | {
"start": 1132,
"end": 2685
} | class ____ extends org.apache.avro.specific.SpecificRecordBase
implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 1031933828916876443L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
"{\"type\":\"record\",\"name\":\"FooBarSpecificRecord\",\"namespace\":\"org.apache.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"nicknames\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"relatedids\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"typeEnum\",\"type\":[\"null\",{\"type\":\"enum\",\"name\":\"TypeEnum\",\"symbols\":[\"a\",\"b\",\"c\"]}],\"default\":null}]}");
public static org.apache.avro.Schema getClassSchema() {
return SCHEMA$;
}
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<FooBarSpecificRecord> ENCODER = new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<FooBarSpecificRecord> DECODER = new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageDecoder instance used by this class.
*
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<FooBarSpecificRecord> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this | FooBarSpecificRecord |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PublicApiNamedStreamShouldReturnStreamTest.java | {
"start": 3466,
"end": 3761
} | class ____ {}
// BUG: Diagnostic contains: PublicApiNamedStreamShouldReturnStream
public TestStreamRandomSuffix stream() {
return new TestStreamRandomSuffix();
}
}
""")
.doTest();
}
}
| TestStreamRandomSuffix |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/AfterThrowingAdviceBindingTests.java | {
"start": 1323,
"end": 3445
} | class ____ {
private ClassPathXmlApplicationContext ctx;
private ITestBean testBean;
private AfterThrowingAdviceBindingTestAspect afterThrowingAdviceAspect;
private AfterThrowingAdviceBindingCollaborator mockCollaborator = mock();
@BeforeEach
void setup() {
this.ctx = new ClassPathXmlApplicationContext(getClass().getSimpleName() + ".xml", getClass());
testBean = (ITestBean) ctx.getBean("testBean");
afterThrowingAdviceAspect = (AfterThrowingAdviceBindingTestAspect) ctx.getBean("testAspect");
afterThrowingAdviceAspect.setCollaborator(mockCollaborator);
}
@AfterEach
void tearDown() {
this.ctx.close();
}
@Test
void simpleAfterThrowing() {
assertThatExceptionOfType(Throwable.class).isThrownBy(() ->
this.testBean.exceptional(new Throwable()));
verify(mockCollaborator).noArgs();
}
@Test
void afterThrowingWithBinding() {
Throwable t = new Throwable();
assertThatExceptionOfType(Throwable.class).isThrownBy(() ->
this.testBean.exceptional(t));
verify(mockCollaborator).oneThrowable(t);
}
@Test
void afterThrowingWithNamedTypeRestriction() {
Throwable t = new Throwable();
assertThatExceptionOfType(Throwable.class).isThrownBy(() ->
this.testBean.exceptional(t));
verify(mockCollaborator).noArgs();
verify(mockCollaborator).oneThrowable(t);
verify(mockCollaborator).noArgsOnThrowableMatch();
}
@Test
void afterThrowingWithRuntimeExceptionBinding() {
RuntimeException ex = new RuntimeException();
assertThatExceptionOfType(Throwable.class).isThrownBy(() ->
this.testBean.exceptional(ex));
verify(mockCollaborator).oneRuntimeException(ex);
}
@Test
void afterThrowingWithTypeSpecified() {
assertThatExceptionOfType(Throwable.class).isThrownBy(() ->
this.testBean.exceptional(new Throwable()));
verify(mockCollaborator).noArgsOnThrowableMatch();
}
@Test
void afterThrowingWithRuntimeTypeSpecified() {
assertThatExceptionOfType(Throwable.class).isThrownBy(() ->
this.testBean.exceptional(new RuntimeException()));
verify(mockCollaborator).noArgsOnRuntimeExceptionMatch();
}
}
final | AfterThrowingAdviceBindingTests |
java | apache__kafka | raft/src/main/java/org/apache/kafka/raft/ExpirationService.java | {
"start": 886,
"end": 1438
} | interface ____ {
/**
* Get a new completable future which will automatically fail exceptionally with a
* {@link org.apache.kafka.common.errors.TimeoutException} if not completed before
* the provided time limit expires.
*
* @param timeoutMs the duration in milliseconds before the future is completed exceptionally
* @param <T> arbitrary future type (the service must set no expectation on this type)
* @return the completable future
*/
<T> CompletableFuture<T> failAfter(long timeoutMs);
}
| ExpirationService |
java | spring-projects__spring-security | test/src/main/java/org/springframework/security/test/context/TestSecurityContextHolder.java | {
"start": 2739,
"end": 4747
} | class ____ {
private static final ThreadLocal<SecurityContext> contextHolder = new ThreadLocal<>();
private TestSecurityContextHolder() {
}
/**
* Clears the {@link SecurityContext} from {@link TestSecurityContextHolder} and
* {@link SecurityContextHolder}.
*/
public static void clearContext() {
contextHolder.remove();
SecurityContextHolder.clearContext();
}
/**
* Gets the {@link SecurityContext} from {@link TestSecurityContextHolder}.
* @return the {@link SecurityContext} from {@link TestSecurityContextHolder}.
*/
public static SecurityContext getContext() {
SecurityContext ctx = contextHolder.get();
if (ctx == null) {
ctx = getDefaultContext();
contextHolder.set(ctx);
}
return ctx;
}
/**
* Sets the {@link SecurityContext} on {@link TestSecurityContextHolder} and
* {@link SecurityContextHolder}.
* @param context the {@link SecurityContext} to use
*/
public static void setContext(SecurityContext context) {
Assert.notNull(context, "Only non-null SecurityContext instances are permitted");
contextHolder.set(context);
SecurityContextHolder.setContext(context);
}
/**
* Creates a new {@link SecurityContext} with the given {@link Authentication}. The
* {@link SecurityContext} is set on {@link TestSecurityContextHolder} and
* {@link SecurityContextHolder}.
* @param authentication the {@link Authentication} to use
* @since 5.1.1
*/
public static void setAuthentication(Authentication authentication) {
Assert.notNull(authentication, "Only non-null Authentication instances are permitted");
SecurityContext context = SecurityContextHolder.createEmptyContext();
context.setAuthentication(authentication);
setContext(context);
}
/**
* Gets the default {@link SecurityContext} by delegating to the
* {@link SecurityContextHolder}
* @return the default {@link SecurityContext}
*/
private static SecurityContext getDefaultContext() {
return SecurityContextHolder.getContext();
}
}
| TestSecurityContextHolder |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/support/CustomNamespaceHandlerTests.java | {
"start": 9086,
"end": 9501
} | class ____ extends AbstractSingleBeanDefinitionParser {
@Override
protected Class<?> getBeanClass(Element element) {
return TestBean.class;
}
@Override
protected void doParse(Element element, BeanDefinitionBuilder builder) {
builder.addPropertyValue("name", element.getAttribute("name"));
builder.addPropertyValue("age", element.getAttribute("age"));
}
}
private static | PersonDefinitionParser |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerAppReport.java | {
"start": 1262,
"end": 2115
} | class ____ {
private final Collection<RMContainer> live;
private final Collection<RMContainer> reserved;
private final boolean pending;
public SchedulerAppReport(SchedulerApplicationAttempt app) {
this.live = app.getLiveContainers();
this.reserved = app.getReservedContainers();
this.pending = app.isPending();
}
/**
* Get the list of live containers
* @return All of the live containers
*/
public Collection<RMContainer> getLiveContainers() {
return live;
}
/**
* Get the list of reserved containers
* @return All of the reserved containers.
*/
public Collection<RMContainer> getReservedContainers() {
return reserved;
}
/**
* Is this application pending?
* @return true if it is else false.
*/
public boolean isPending() {
return pending;
}
}
| SchedulerAppReport |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.