language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/ContainerLaunchFailAppMaster.java | {
"start": 1394,
"end": 2600
} | class ____
extends ApplicationMaster.NMCallbackHandler {
public FailContainerLaunchNMCallbackHandler(
ApplicationMaster applicationMaster) {
super(applicationMaster);
}
@Override
public void onContainerStarted(ContainerId containerId,
Map<String, ByteBuffer> allServiceResponse) {
super.onStartContainerError(containerId,
new RuntimeException("Inject Container Launch failure"));
}
}
public static void main(String[] args) {
boolean result = false;
try {
ContainerLaunchFailAppMaster appMaster =
new ContainerLaunchFailAppMaster();
LOG.info("Initializing ApplicationMaster");
boolean doRun = appMaster.init(args);
if (!doRun) {
System.exit(0);
}
appMaster.run();
result = appMaster.finish();
} catch (Throwable t) {
LOG.error("Error running ApplicationMaster", t);
System.exit(1);
}
if (result) {
LOG.info("Application Master completed successfully. exiting");
System.exit(0);
} else {
LOG.info("Application Master failed. exiting");
System.exit(2);
}
}
}
| FailContainerLaunchNMCallbackHandler |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/ProgramDescription.java | {
"start": 922,
"end": 1148
} | interface ____ a Program to have a description of the plan which can be shown
* to the user. For a more detailed description of what should be included in the description see
* getDescription().
*/
@PublicEvolving
public | allows |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractInputStreamAssert.java | {
"start": 2383,
"end": 22456
} | class ____<SELF extends AbstractInputStreamAssert<SELF, ACTUAL>, ACTUAL extends InputStream>
extends AbstractAssertWithComparator<SELF, ACTUAL> {
private final Diff diff = new Diff();
private final BinaryDiff binaryDiff = new BinaryDiff();
protected AbstractInputStreamAssert(ACTUAL actual, Class<?> selfType) {
super(actual, selfType);
}
/**
* Converts the content of the actual {@link InputStream} to a {@link String} by decoding its bytes using the given charset
* and returns assertions for the computed String allowing String specific assertions from this call.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Example :
* <pre><code class='java'> InputStream abcInputStream = new ByteArrayInputStream("abc".getBytes());
*
* // assertion succeeds
* assertThat(abcInputStream).asString(UTF_8)
* .startsWith("a");
*
* // assertion fails
* assertThat(abcInputStream).asString(UTF_8)
* .startsWith("e");</code></pre>
*
* @param charset the {@link Charset} to interpret the {@code InputStream}'s content to a String
* @return a string assertion object.
* @throws NullPointerException if the given {@code Charset} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws UncheckedIOException if an I/O error occurs.
* @since 3.20.0
*/
@CheckReturnValue
public AbstractStringAssert<?> asString(Charset charset) {
isNotNull();
return assertThat(asString(actual, charset));
}
private String asString(InputStream actual, Charset charset) {
requireNonNull(charset, shouldNotBeNull("charset")::create);
return wrapWithMarkAndReset(actual, () -> new String(readAllBytes(actual), charset));
}
private static byte[] readAllBytes(InputStream is) {
try {
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] data = new byte[1024];
for (int length; (length = is.read(data)) != -1;) {
os.write(data, 0, length);
}
return os.toByteArray();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Verifies that the content of the actual {@code InputStream} is equal to the content of the given one.
* <p>
* <b>Warning: this will consume the whole input streams in case the underlying
* implementations do not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new ByteArrayInputStream(new byte[] {0xa})).hasSameContentAs(new ByteArrayInputStream(new byte[] {0xa}));
*
* // assertions will fail
* assertThat(new ByteArrayInputStream(new byte[] {0xa})).hasSameContentAs(new ByteArrayInputStream(new byte[] {}));
* assertThat(new ByteArrayInputStream(new byte[] {0xa})).hasSameContentAs(new ByteArrayInputStream(new byte[] {0xa, 0xc, 0xd}));</code></pre>
*
* @param expected the given {@code InputStream} to compare the actual {@code InputStream} to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given {@code InputStream} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the content of the actual {@code InputStream} is not equal to the content of the given one.
* @throws UncheckedIOException if an I/O error occurs.
*/
public SELF hasSameContentAs(InputStream expected) {
isNotNull();
assertHasSameContentAs(expected);
return myself;
}
private void assertHasSameContentAs(InputStream expected) {
requireNonNull(expected, shouldNotBeNull("expected")::create);
wrapWithMarkAndReset(actual, () -> wrapWithMarkAndReset(expected, () -> {
try {
List<Delta<String>> diffs = diff.diff(actual, expected);
if (!diffs.isEmpty()) throw assertionError(shouldHaveSameContent(actual, expected, diffs));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}));
}
/**
* Verifies that the content of the actual {@code InputStream} is empty.
* <p>
* <b>Warning: this will consume the first byte of the input stream in case
* the underlying implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new ByteArrayInputStream(new byte[] {})).isEmpty());
*
* // assertions will fail
* assertThat(new ByteArrayInputStream(new byte[] {0xa})).isEmpty(); </code></pre>
*
* @return {@code this} assertion object.
* @throws NullPointerException if the given {@code InputStream} is {@code null}.
* @throws AssertionError if the content of the actual {@code InputStream} is not empty.
* @throws UncheckedIOException if an I/O error occurs.
* @since 3.17.0
*/
public SELF isEmpty() {
isNotNull();
assertIsEmpty();
return myself;
}
private void assertIsEmpty() {
wrapWithMarkAndReset(actual, () -> {
try {
if (actual.read() != -1) throw assertionError(shouldBeEmpty(actual));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
}
/**
* Verifies that the content of the actual {@code InputStream} is not empty.
* <p>
* <b>Warning: this will consume the first byte of the input stream in case
* the underlying implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new ByteArrayInputStream(new byte[] {0xa})).isNotEmpty());
*
* // assertions will fail
* assertThat(new ByteArrayInputStream(new byte[] {})).isNotEmpty();</code></pre>
*
* @return {@code this} assertion object.
* @throws NullPointerException if the given {@code InputStream} is {@code null}.
* @throws AssertionError if the content of the actual {@code InputStream} is empty.
* @throws UncheckedIOException if an I/O error occurs.
* @since 3.17.0
*/
public SELF isNotEmpty() {
isNotNull();
assertIsNotEmpty();
return myself;
}
private void assertIsNotEmpty() {
wrapWithMarkAndReset(actual, () -> {
try {
if (actual.read() == -1) throw assertionError(shouldNotBeEmpty());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
}
/**
* Verifies that the content of the actual {@code InputStream} is equal to the given {@code String} <b>except for newlines which are ignored</b>.
* <p>
* This will change in AssertJ 4.0 where newlines will be taken into account, in the meantime, to get this behavior
* one can use {@link #asString(Charset)} and then chain with {@link AbstractStringAssert#isEqualTo(String)}.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new ByteArrayInputStream("a".getBytes())).hasContent("a");
*
* // assertions will fail
* assertThat(new ByteArrayInputStream("a".getBytes())).hasContent("");
* assertThat(new ByteArrayInputStream("a".getBytes())).hasContent("ab");</code></pre>
*
* @param expected the given {@code String} to compare the actual {@code InputStream} to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given {@code String} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the content of the actual {@code InputStream} is not equal to the given {@code String}.
* @throws UncheckedIOException if an I/O error occurs.
* @since 3.11.0
*/
public SELF hasContent(String expected) {
isNotNull();
assertHasContent(expected);
return myself;
}
private void assertHasContent(String expected) {
requireNonNull(expected, shouldNotBeNull("expected")::create);
wrapWithMarkAndReset(actual, () -> {
try {
List<Delta<String>> diffs = diff.diff(actual, expected);
if (!diffs.isEmpty()) throw assertionError(shouldHaveSameContent(actual, expected, diffs));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
}
/**
* Verifies that the binary content of the actual {@code InputStream} is <b>exactly</b> equal to the given one.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Example:
* <pre><code class='java'> InputStream inputStream = new ByteArrayInputStream(new byte[] {1, 2});
*
* // assertion will pass
* assertThat(inputStream).hasContent(new byte[] {1, 2});
*
* // assertions will fail
* assertThat(inputStream).hasBinaryContent(new byte[] { });
* assertThat(inputStream).hasBinaryContent(new byte[] {0, 0});</code></pre>
*
* @param expected the expected binary content to compare the actual {@code InputStream}'s content to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given content is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the content of the actual {@code InputStream} is not equal to the given binary content.
* @throws UncheckedIOException if an I/O error occurs.
* @since 3.16.0
*/
public SELF hasBinaryContent(byte[] expected) {
isNotNull();
assertHasBinaryContent(expected);
return myself;
}
private void assertHasBinaryContent(byte[] expected) {
requireNonNull(expected, shouldNotBeNull("expected")::create);
wrapWithMarkAndReset(actual, () -> {
try {
BinaryDiffResult result = binaryDiff.diff(actual, expected);
if (!result.hasNoDiff()) throw assertionError(shouldHaveBinaryContent(actual, result));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
}
/**
* Verifies that the tested {@link InputStream} digest (calculated with the specified {@link MessageDigest}) is equal to the given one.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Examples:
* <pre><code class="java"> // assume that assertj-core-2.9.0.jar was downloaded from https://repo1.maven.org/maven2/org/assertj/assertj-core/2.9.0/assertj-core-2.9.0.jar
* InputStream tested = new FileInputStream(new File("assertj-core-2.9.0.jar"));
*
* // The following assertions succeed:
* assertThat(tested).hasDigest(MessageDigest.getInstance("SHA1"), new byte[]{92, 90, -28, 91, 88, -15, 32, 35, -127, 122, -66, 73, 36, 71, -51, -57, -111, 44, 26, 44});
* assertThat(tested).hasDigest(MessageDigest.getInstance("MD5"), new byte[]{-36, -77, 1, 92, -46, -124, 71, 100, 76, -127, 10, -13, 82, -125, 44, 25});
*
* // The following assertions fail:
* assertThat(tested).hasDigest(MessageDigest.getInstance("SHA1"), "93b9ced2ee5b3f0f4c8e640e77470dab031d4cad".getBytes());
* assertThat(tested).hasDigest(MessageDigest.getInstance("MD5"), "3735dff8e1f9df0492a34ef075205b8f".getBytes());</code></pre>
*
* @param algorithm the MessageDigest used to calculate the digests.
* @param expected the expected binary content to compare the actual {@code InputStream}'s digest to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given algorithm is {@code null}.
* @throws NullPointerException if the given digest is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is not readable.
* @throws UncheckedIOException if an I/O error occurs.
* @throws AssertionError if the content of the tested {@code InputStream}'s digest is not equal to the given one.
* @since 3.11.0
*/
public SELF hasDigest(MessageDigest algorithm, byte[] expected) {
isNotNull();
assertHasDigest(algorithm, expected);
return myself;
}
/**
* Verifies that the tested {@link InputStream} digest (calculated with the specified {@link MessageDigest}) is equal to the given one.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Examples:
* <pre><code class="java"> // assume that assertj-core-2.9.0.jar was downloaded from https://repo1.maven.org/maven2/org/assertj/assertj-core/2.9.0/assertj-core-2.9.0.jar
* InputStream tested = new FileInputStream(new File("assertj-core-2.9.0.jar"));
*
* // The following assertions succeed:
* assertThat(tested).hasDigest(MessageDigest.getInstance("SHA1"), "5c5ae45b58f12023817abe492447cdc7912c1a2c");
* assertThat(tested).hasDigest(MessageDigest.getInstance("MD5"), "dcb3015cd28447644c810af352832c19");
*
* // The following assertions fail:
* assertThat(tested).hasDigest(MessageDigest.getInstance("SHA1"), "93b9ced2ee5b3f0f4c8e640e77470dab031d4cad");
* assertThat(tested).hasDigest(MessageDigest.getInstance("MD5"), "3735dff8e1f9df0492a34ef075205b8f");</code></pre>
*
* @param algorithm the MessageDigest used to calculate the digests.
* @param digest the expected binary content to compare the actual {@code InputStream}'s digest to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given algorithm is {@code null}.
* @throws NullPointerException if the given digest is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is not readable.
* @throws UncheckedIOException if an I/O error occurs.
* @throws AssertionError if the content of the tested {@code InputStream}'s digest is not equal to the given one.
* @since 3.11.0
*/
public SELF hasDigest(MessageDigest algorithm, String digest) {
isNotNull();
assertHasDigest(algorithm, Digests.fromHex(digest));
return myself;
}
/**
* Verifies that the tested {@link InputStream} digest (calculated with the specified algorithm) is equal to the given one.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Examples:
* <pre><code class="java"> // assume that assertj-core-2.9.0.jar was downloaded from https://repo1.maven.org/maven2/org/assertj/assertj-core/2.9.0/assertj-core-2.9.0.jar
* InputStream tested = new FileInputStream(new File("assertj-core-2.9.0.jar"));
*
* // The following assertion succeeds:
* assertThat(tested).hasDigest("SHA1", new byte[]{92, 90, -28, 91, 88, -15, 32, 35, -127, 122, -66, 73, 36, 71, -51, -57, -111, 44, 26, 44});
* assertThat(tested).hasDigest("MD5", new byte[]{-36, -77, 1, 92, -46, -124, 71, 100, 76, -127, 10, -13, 82, -125, 44, 25});
*
* // The following assertion fails:
* assertThat(tested).hasDigest("SHA1", "93b9ced2ee5b3f0f4c8e640e77470dab031d4cad".getBytes());
* assertThat(tested).hasDigest("MD5", "3735dff8e1f9df0492a34ef075205b8f".getBytes()); </code></pre>
*
* @param algorithm the algorithm used to calculate the digests.
* @param expected the expected binary content to compare the actual {@code InputStream}'s content to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given algorithm is {@code null}.
* @throws NullPointerException if the given digest is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is not readable.
* @throws UncheckedIOException if an I/O error occurs.
* @throws AssertionError if the content of the tested {@code InputStream}'s digest is not equal to the given one.
* @since 3.11.0
*/
public SELF hasDigest(String algorithm, byte[] expected) {
isNotNull();
assertHasDigest(algorithm, expected);
return myself;
}
/**
* Verifies that the tested {@link InputStream} digest (calculated with the specified algorithm) is equal to the given one.
* <p>
* <b>Warning: this will consume the whole input stream in case the underlying
* implementation does not support {@link InputStream#markSupported() marking}.</b>
* <p>
* Examples:
* <pre><code class="java"> // assume that assertj-core-2.9.0.jar was downloaded from https://repo1.maven.org/maven2/org/assertj/assertj-core/2.9.0/assertj-core-2.9.0.jar
* InputStream tested = new FileInputStream(new File("assertj-core-2.9.0.jar"));
*
* // The following assertion succeeds:
* assertThat(tested).hasDigest("SHA1", "5c5ae45b58f12023817abe492447cdc7912c1a2c");
* assertThat(tested).hasDigest("MD5", "dcb3015cd28447644c810af352832c19");
*
* // The following assertion fails:
* assertThat(tested).hasDigest("SHA1", "93b9ced2ee5b3f0f4c8e640e77470dab031d4cad");
* assertThat(tested).hasDigest("MD5", "3735dff8e1f9df0492a34ef075205b8f"); </code></pre>
*
* @param algorithm the algorithm used to calculate the digests.
* @param digest the expected binary content to compare the actual {@code InputStream}'s content to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given algorithm is {@code null}.
* @throws NullPointerException if the given digest is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is {@code null}.
* @throws AssertionError if the actual {@code InputStream} is not readable.
* @throws UncheckedIOException if an I/O error occurs.
* @throws AssertionError if the content of the tested {@code InputStream}'s digest is not equal to the given one.
* @since 3.11.0
*/
public SELF hasDigest(String algorithm, String digest) {
isNotNull();
assertHasDigest(algorithm, digest);
return myself;
}
private void assertHasDigest(String algorithm, String digest) {
requireNonNull(digest, shouldNotBeNull("digest")::create);
assertHasDigest(algorithm, Digests.fromHex(digest));
}
private void assertHasDigest(String algorithm, byte[] digest) {
requireNonNull(algorithm, shouldNotBeNull("algorithm")::create);
try {
assertHasDigest(MessageDigest.getInstance(algorithm), digest);
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException(e);
}
}
private void assertHasDigest(MessageDigest algorithm, byte[] digest) {
requireNonNull(algorithm, shouldNotBeNull("algorithm")::create);
requireNonNull(digest, shouldNotBeNull("digest")::create);
wrapWithMarkAndReset(actual, () -> {
try {
DigestDiff diff = digestDiff(actual, algorithm, digest);
if (diff.digestsDiffer()) throw assertionError(shouldHaveDigest(actual, diff));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
}
private static void wrapWithMarkAndReset(InputStream inputStream, Runnable runnable) {
wrapWithMarkAndReset(inputStream, () -> {
runnable.run();
return null;
});
}
private static <T> T wrapWithMarkAndReset(InputStream inputStream, Supplier<T> callable) {
if (!inputStream.markSupported()) {
return callable.get();
}
inputStream.mark(Integer.MAX_VALUE);
try {
return callable.get();
} finally {
try {
inputStream.reset();
} catch (IOException ignored) {}
}
}
}
| AbstractInputStreamAssert |
java | apache__kafka | metadata/src/test/java/org/apache/kafka/metadata/FakeKafkaConfigSchema.java | {
"start": 1494,
"end": 2454
} | class ____ {
public static final Map<ConfigResource.Type, ConfigDef> CONFIGS = new HashMap<>();
static {
CONFIGS.put(BROKER, new ConfigDef().
define("unclean.leader.election.enable", BOOLEAN, "false", HIGH, "").
define("min.insync.replicas", INT, "1", HIGH, ""));
CONFIGS.put(TOPIC, new ConfigDef().
define("unclean.leader.election.enable", BOOLEAN, "false", HIGH, "").
define("min.insync.replicas", INT, "1", HIGH, ""));
}
public static final Map<String, List<ConfigSynonym>> SYNONYMS = new HashMap<>();
static {
SYNONYMS.put("unclean.leader.election.enable",
List.of(new ConfigSynonym("unclean.leader.election.enable")));
SYNONYMS.put("min.insync.replicas",
List.of(new ConfigSynonym("min.insync.replicas")));
}
public static final KafkaConfigSchema INSTANCE = new KafkaConfigSchema(CONFIGS, SYNONYMS);
}
| FakeKafkaConfigSchema |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/a/IncomingDataPointBenchmark_file_double.java | {
"start": 288,
"end": 961
} | class ____ {
static String json;
public static void main(String[] args) throws Exception {
File file = new File("/Users/wenshao/Downloads/datalist_double");
json = FileUtils.readFileToString(file);
for (int i = 0; i < 10; ++i) {
perf();
}
}
public static void perf() {
long start = System.currentTimeMillis();
for (int i = 0; i < 1000; ++i) {
JSON.parseArray(json, IncomingDataPoint_double.class);
}
long millis = System.currentTimeMillis() - start;
System.out.println("IncomingDataPoint_double millis : " + millis);
}
}
| IncomingDataPointBenchmark_file_double |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/config/ExecutionConfigOptions.java | {
"start": 60113,
"end": 61257
} | enum ____ implements DescribedEnum {
IGNORE(
text(
"Don't apply any trimming and padding, and instead "
+ "ignore the CHAR/VARCHAR/BINARY/VARBINARY length directive.")),
TRIM_PAD(
text(
"Trim and pad string and binary values to match the length "
+ "defined by the CHAR/VARCHAR/BINARY/VARBINARY length.")),
ERROR(
text(
"Throw a runtime exception when writing data into a "
+ "CHAR/VARCHAR/BINARY/VARBINARY column which does not match the length"
+ " constraint"));
private final InlineElement description;
TypeLengthEnforcer(InlineElement description) {
this.description = description;
}
@Internal
@Override
public InlineElement getDescription() {
return description;
}
}
/** The enforcer to check the constraints on nested types. */
@PublicEvolving
public | TypeLengthEnforcer |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java | {
"start": 374,
"end": 490
} | class ____ {
// Implements version parsing after https://github.com/oracle/graal/pull/6302
static final | GraalVM |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/tx/JmsTransactedDeadLetterChannelHandlerRollbackOnExceptionIT.java | {
"start": 1891,
"end": 2109
} | class ____ extends CamelTestSupport {
@RegisterExtension
public static ArtemisService service = ArtemisServiceFactory.createVMService();
public static | JmsTransactedDeadLetterChannelHandlerRollbackOnExceptionIT |
java | quarkusio__quarkus | integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/analysis/Analysis3TestingEntity.java | {
"start": 293,
"end": 620
} | class ____ extends AnalysisTestingEntityBase {
public Analysis3TestingEntity() {
}
public Analysis3TestingEntity(String text) {
super(text);
}
@FullTextField(analyzer = "index-level-analyzer-3")
@Override
public String getText() {
return super.getText();
}
}
| Analysis3TestingEntity |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java | {
"start": 10234,
"end": 10584
} | class ____ {
private final LocalFileSystem localFileSystem;
private final FsPermission expectedPermission;
public CheckContext(LocalFileSystem localFileSystem,
FsPermission expectedPermission) {
this.localFileSystem = localFileSystem;
this.expectedPermission = expectedPermission;
}
}
}
| CheckContext |
java | apache__camel | core/camel-console/src/generated/java/org/apache/camel/impl/console/GarbageCollectorDevConsoleConfigurer.java | {
"start": 720,
"end": 2512
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("CamelContext", org.apache.camel.CamelContext.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.impl.console.GarbageCollectorDevConsole target = (org.apache.camel.impl.console.GarbageCollectorDevConsole) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "camelcontext":
case "camelContext": target.setCamelContext(property(camelContext, org.apache.camel.CamelContext.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "camelcontext":
case "camelContext": return org.apache.camel.CamelContext.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.impl.console.GarbageCollectorDevConsole target = (org.apache.camel.impl.console.GarbageCollectorDevConsole) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "camelcontext":
case "camelContext": return target.getCamelContext();
default: return null;
}
}
}
| GarbageCollectorDevConsoleConfigurer |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java | {
"start": 20601,
"end": 24519
} | class ____ extends FSDataInputStream {
private FileSystem fs;
private Path file;
private long fileLen = -1L;
FSDataBoundedInputStream(FileSystem fs, Path file, InputStream in) {
super(in);
this.fs = fs;
this.file = file;
}
@Override
public boolean markSupported() {
return false;
}
/* Return the file length */
private long getFileLength() throws IOException {
if( fileLen==-1L ) {
fileLen = fs.getContentSummary(file).getLength();
}
return fileLen;
}
/**
* Skips over and discards <code>n</code> bytes of data from the
* input stream.
*
*The <code>skip</code> method skips over some smaller number of bytes
* when reaching end of file before <code>n</code> bytes have been skipped.
* The actual number of bytes skipped is returned. If <code>n</code> is
* negative, no bytes are skipped.
*
* @param n the number of bytes to be skipped.
* @return the actual number of bytes skipped.
* @exception IOException if an I/O error occurs.
* ChecksumException if the chunk to skip to is corrupted
*/
@Override
public synchronized long skip(long n) throws IOException {
long curPos = getPos();
long fileLength = getFileLength();
if( n+curPos > fileLength ) {
n = fileLength - curPos;
}
return super.skip(n);
}
/**
* Seek to the given position in the stream.
* The next read() will be from that position.
*
* <p>This method does not allow seek past the end of the file.
* This produces IOException.
*
* @param pos the postion to seek to.
* @exception IOException if an I/O error occurs or seeks after EOF
* ChecksumException if the chunk to seek to is corrupted
*/
@Override
public synchronized void seek(long pos) throws IOException {
if (pos > getFileLength()) {
throw new EOFException("Cannot seek after EOF");
}
super.seek(pos);
}
}
/**
* Opens an FSDataInputStream at the indicated Path.
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
* @throws IOException if an I/O error occurs.
*/
@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
FileSystem fs;
InputStream in;
if (verifyChecksum) {
fs = this;
in = new ChecksumFSInputChecker(this, f, bufferSize);
} else {
fs = getRawFileSystem();
in = fs.open(f, bufferSize);
}
return new FSDataBoundedInputStream(fs, f, in);
}
@Override
public FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException {
throw new UnsupportedOperationException("Append is not supported "
+ "by ChecksumFileSystem");
}
@Override
public boolean truncate(Path f, long newLength) throws IOException {
throw new UnsupportedOperationException("Truncate is not supported "
+ "by ChecksumFileSystem");
}
@Override
public void concat(final Path f, final Path[] psrcs) throws IOException {
throw new UnsupportedOperationException("Concat is not supported "
+ "by ChecksumFileSystem");
}
/**
* Calculated the length of the checksum file in bytes.
* @param size the length of the data file in bytes
* @param bytesPerSum the number of bytes in a checksum block
* @return the number of bytes in the checksum file
*/
public static long getChecksumLength(long size, int bytesPerSum) {
//the checksum length is equal to size passed divided by bytesPerSum +
//bytes written in the beginning of the checksum file.
return ((size + bytesPerSum - 1) / bytesPerSum) * FSInputChecker.CHECKSUM_SIZE +
ChecksumFSInputChecker.HEADER_LENGTH;
}
/** This | FSDataBoundedInputStream |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/error/ShouldHaveValue_create_Test.java | {
"start": 1300,
"end": 4134
} | class ____ {
private static final TestDescription TEST_DESCRIPTION = new TestDescription("TEST");
private Person joe;
@BeforeEach
void setup() {
joe = new Person();
joe.name = "Joe";
joe.age = 33;
joe.account = 123456789L;
}
@Test
void should_create_error_message_for_AtomicIntegerFieldUpdater() {
// GIVEN
AtomicIntegerFieldUpdater<Person> updater = AtomicIntegerFieldUpdater.newUpdater(Person.class, "age");
// WHEN
String message = shouldHaveValue(updater, 33, 20, joe).create(TEST_DESCRIPTION, CONFIGURATION_PROVIDER.representation());
// THEN
then(message).isEqualTo(format("[TEST] %n" +
"Expecting AtomicIntegerFieldUpdater to have value:%n" +
" 20%n" +
"but had:%n" +
" 33%n" +
"to update target object:%n" +
" Person [name=Joe, age=33, account=123456789]"));
}
@Test
void should_create_error_message_for_AtomicLongFieldUpdater() {
// GIVEN
AtomicLongFieldUpdater<Person> updater = AtomicLongFieldUpdater.newUpdater(Person.class, "account");
// WHEN
String message = shouldHaveValue(updater, 123456789L, 0L, joe).create(TEST_DESCRIPTION,
CONFIGURATION_PROVIDER.representation());
// THEN
then(message).isEqualTo(format("[TEST] %n" +
"Expecting AtomicLongFieldUpdater to have value:%n" +
" 0L%n" +
"but had:%n" +
" 123456789L%n" +
"to update target object:%n" +
" Person [name=Joe, age=33, account=123456789]"));
}
@Test
void should_create_error_message_for_AtomicReferenceFieldUpdater() {
// GIVEN
AtomicReferenceFieldUpdater<Person, String> updater = newUpdater(Person.class, String.class, "name");
// WHEN
String message = shouldHaveValue(updater, "Joe", "Jack", joe).create(TEST_DESCRIPTION,
CONFIGURATION_PROVIDER.representation());
// THEN
then(message).isEqualTo(format("[TEST] %n" +
"Expecting AtomicReferenceFieldUpdater to have value:%n" +
" \"Jack\"%n" +
"but had:%n" +
" \"Joe\"%n" +
"to update target object:%n" +
" Person [name=Joe, age=33, account=123456789]"));
}
private static | ShouldHaveValue_create_Test |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/type/classreading/SimpleMethodMetadataReadingVisitor.java | {
"start": 1323,
"end": 3094
} | class ____ extends MethodVisitor {
private final @Nullable ClassLoader classLoader;
private final String declaringClassName;
private final int access;
private final String methodName;
private final String descriptor;
private final List<MergedAnnotation<?>> annotations = new ArrayList<>(4);
private final Consumer<SimpleMethodMetadata> consumer;
private @Nullable Source source;
SimpleMethodMetadataReadingVisitor(@Nullable ClassLoader classLoader, String declaringClassName,
int access, String methodName, String descriptor, Consumer<SimpleMethodMetadata> consumer) {
super(SpringAsmInfo.ASM_VERSION);
this.classLoader = classLoader;
this.declaringClassName = declaringClassName;
this.access = access;
this.methodName = methodName;
this.descriptor = descriptor;
this.consumer = consumer;
}
@Override
public @Nullable AnnotationVisitor visitAnnotation(String descriptor, boolean visible) {
return MergedAnnotationReadingVisitor.get(this.classLoader, getSource(),
descriptor, visible, this.annotations::add);
}
@Override
public void visitEnd() {
String returnTypeName = Type.getReturnType(this.descriptor).getClassName();
MergedAnnotations annotations = MergedAnnotations.of(this.annotations);
SimpleMethodMetadata metadata = new SimpleMethodMetadata(this.methodName, this.access,
this.declaringClassName, returnTypeName, getSource(), annotations);
this.consumer.accept(metadata);
}
private Object getSource() {
Source source = this.source;
if (source == null) {
source = new Source(this.declaringClassName, this.methodName, this.access, this.descriptor);
this.source = source;
}
return source;
}
/**
* {@link MergedAnnotation} source.
*/
static final | SimpleMethodMetadataReadingVisitor |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/SubsequenceInputTypeStrategyTest.java | {
"start": 1728,
"end": 8730
} | class ____ extends InputTypeStrategiesTestBase {
@Override
protected Stream<TestSpec> testData() {
return Stream.of(
TestSpec.forStrategy(
"A strategy used for IF ELSE with valid arguments",
InputTypeStrategies.compositeSequence()
.argument(logical(LogicalTypeRoot.BOOLEAN))
.subsequence(commonType(2))
.finish())
.calledWithArgumentTypes(
DataTypes.BOOLEAN(), DataTypes.SMALLINT(), DataTypes.DECIMAL(10, 2))
.expectSignature("f(<BOOLEAN>, <COMMON>, <COMMON>)")
.expectArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.DECIMAL(10, 2),
DataTypes.DECIMAL(10, 2)),
TestSpec.forStrategy(
"Strategy fails if any of the nested strategies fail",
InputTypeStrategies.compositeSequence()
.argument(logical(LogicalTypeRoot.BOOLEAN))
.subsequence(commonType(2))
.finish())
.calledWithArgumentTypes(
DataTypes.BOOLEAN(), DataTypes.VARCHAR(3), DataTypes.DECIMAL(10, 2))
.expectErrorMessage(
"Could not find a common type for arguments: [VARCHAR(3), DECIMAL(10, 2)]"),
TestSpec.forStrategy(
"Strategy with a varying argument",
InputTypeStrategies.compositeSequence()
.argument(logical(LogicalTypeRoot.BOOLEAN))
.subsequence(commonType(2))
.finishWithVarying(
varyingSequence(logical(LogicalTypeRoot.BIGINT))))
.calledWithArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.SMALLINT(),
DataTypes.DECIMAL(10, 2),
DataTypes.SMALLINT(),
DataTypes.BIGINT(),
DataTypes.TINYINT())
.expectSignature("f(<BOOLEAN>, <COMMON>, <COMMON>, <BIGINT>...)")
.expectArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.DECIMAL(10, 2),
DataTypes.DECIMAL(10, 2),
DataTypes.BIGINT(),
DataTypes.BIGINT(),
DataTypes.BIGINT()),
TestSpec.forStrategy(
"A complex strategy with few sub sequences",
InputTypeStrategies.compositeSequence()
.argument(logical(LogicalTypeRoot.BOOLEAN))
.subsequence(commonType(2))
.argument(explicit(DataTypes.TIME().notNull()))
.subsequence(commonType(2))
.finishWithVarying(
varyingSequence(
logical(LogicalTypeFamily.TIMESTAMP), ANY)))
.calledWithArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.SMALLINT(),
DataTypes.DECIMAL(10, 2),
DataTypes.TIME().notNull(),
DataTypes.TINYINT().notNull(),
DataTypes.DECIMAL(13, 3).notNull(),
DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE().notNull(),
DataTypes.SMALLINT(),
DataTypes.BIGINT())
.expectSignature(
"f(<BOOLEAN>, <COMMON>, <COMMON>, TIME(0) NOT NULL, <COMMON>, <COMMON>, <TIMESTAMP>, <ANY>...)")
.expectArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.DECIMAL(10, 2),
DataTypes.DECIMAL(10, 2),
DataTypes.TIME().notNull(),
DataTypes.DECIMAL(13, 3).notNull(),
DataTypes.DECIMAL(13, 3).notNull(),
DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE().notNull(),
DataTypes.SMALLINT(),
DataTypes.BIGINT()),
TestSpec.forStrategy(
"A strategy with named argument",
InputTypeStrategies.compositeSequence()
.argument("arg1", logical(LogicalTypeRoot.BOOLEAN))
.subsequence(
sequence(
Arrays.asList("arg2", "arg3"),
Arrays.asList(
logical(
LogicalTypeFamily
.INTEGER_NUMERIC),
logical(
LogicalTypeFamily
.INTEGER_NUMERIC))))
.argument(logical(LogicalTypeRoot.INTEGER))
.finish())
.calledWithArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.SMALLINT(),
DataTypes.BIGINT(),
DataTypes.INT())
.expectSignature(
"f(arg1 <BOOLEAN>, arg2 <INTEGER_NUMERIC>, arg3 <INTEGER_NUMERIC>, <INTEGER>)")
.expectArgumentTypes(
DataTypes.BOOLEAN(),
DataTypes.SMALLINT(),
DataTypes.BIGINT(),
DataTypes.INT()));
}
}
| SubsequenceInputTypeStrategyTest |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/DaoLog.java | {
"start": 850,
"end": 2411
} | class ____ {
private final static String TAG = "greenDAO";
public static final int VERBOSE = 2;
public static final int DEBUG = 3;
public static final int INFO = 4;
public static final int WARN = 5;
public static final int ERROR = 6;
public static final int ASSERT = 7;
public static boolean isLoggable(int level) {
return Log.isLoggable(TAG, level);
}
public static String getStackTraceString(Throwable th) {
return Log.getStackTraceString(th);
}
public static int println(int level, String msg) {
return Log.println(level, TAG, msg);
}
public static int v(String msg) {
return Log.v(TAG, msg);
}
public static int v(String msg, Throwable th) {
return Log.v(TAG, msg, th);
}
public static int d(String msg) {
return Log.d(TAG, msg);
}
public static int d(String msg, Throwable th) {
return Log.d(TAG, msg, th);
}
public static int i(String msg) {
return Log.i(TAG, msg);
}
public static int i(String msg, Throwable th) {
return Log.i(TAG, msg, th);
}
public static int w(String msg) {
return Log.w(TAG, msg);
}
public static int w(String msg, Throwable th) {
return Log.w(TAG, msg, th);
}
public static int w(Throwable th) {
return Log.w(TAG, th);
}
public static int e(String msg) {
return Log.w(TAG, msg);
}
public static int e(String msg, Throwable th) {
return Log.e(TAG, msg, th);
}
}
| DaoLog |
java | google__auto | value/src/main/java/com/google/auto/value/processor/MethodSignature.java | {
"start": 1378,
"end": 2592
} | class ____ {
private final ExecutableType originalMethod;
private final ExecutableType rewrittenMethod;
private MethodSignature(ExecutableType originalMethod, ExecutableType rewrittenMethod) {
this.originalMethod = originalMethod;
this.rewrittenMethod = rewrittenMethod;
}
ImmutableList<AnnotatedTypeMirror> parameterTypes() {
return IntStream.range(0, originalMethod.getParameterTypes().size())
.mapToObj(
i ->
new AnnotatedTypeMirror(
originalMethod.getParameterTypes().get(i),
rewrittenMethod.getParameterTypes().get(i)))
.collect(toImmutableList());
}
AnnotatedTypeMirror returnType() {
return new AnnotatedTypeMirror(originalMethod.getReturnType(), rewrittenMethod.getReturnType());
}
static MethodSignature asMemberOf(Types typeUtils, DeclaredType in, ExecutableElement method) {
return new MethodSignature(
asExecutable(method.asType()), asExecutable(typeUtils.asMemberOf(in, method)));
}
static MethodSignature asMemberOf(Types typeUtils, TypeElement in, ExecutableElement method) {
return asMemberOf(typeUtils, asDeclared(in.asType()), method);
}
}
| MethodSignature |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/lookup/ArgumentLookupInvalidInjectionPointTest.java | {
"start": 1799,
"end": 1972
} | class ____ {
public String hello(@Named MyDependency dependency) {
return "foobar" + dependency.getId();
}
}
@Singleton
static | MyService |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java | {
"start": 1227,
"end": 2936
} | class ____ extends AcknowledgedRequest<Request> implements ToXContentObject {
public static Request parseRequest(String datafeedId, @Nullable IndicesOptions indicesOptions, XContentParser parser) {
DatafeedUpdate.Builder update = DatafeedUpdate.PARSER.apply(parser, null);
if (indicesOptions != null) {
update.setIndicesOptions(indicesOptions);
}
update.setId(datafeedId);
return new Request(update.build());
}
private DatafeedUpdate update;
public Request(DatafeedUpdate update) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.update = update;
}
public Request(StreamInput in) throws IOException {
super(in);
update = new DatafeedUpdate(in);
}
public DatafeedUpdate getUpdate() {
return update;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
update.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
update.toXContent(builder, params);
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(update, request.update);
}
@Override
public int hashCode() {
return Objects.hash(update);
}
}
}
| Request |
java | apache__hadoop | hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java | {
"start": 1873,
"end": 3233
} | class ____ implements JobStoryProducer {
public static final Logger LOG = LoggerFactory.getLogger(DebugJobProducer.class);
final ArrayList<JobStory> submitted;
private final Configuration conf;
private final AtomicInteger numJobs;
public DebugJobProducer(int numJobs, Configuration conf) {
super();
MockJob.reset();
this.conf = conf;
this.numJobs = new AtomicInteger(numJobs);
this.submitted = new ArrayList<JobStory>();
}
@Override
public JobStory getNextJob() throws IOException {
if (numJobs.getAndDecrement() > 0) {
final MockJob ret = new MockJob(conf);
submitted.add(ret);
return ret;
}
return null;
}
@Override
public void close() {
}
static double[] getDistr(Random r, double mindist, int size) {
assert 0.0 <= mindist && mindist <= 1.0;
final double min = mindist / size;
final double rem = 1.0 - min * size;
final double[] tmp = new double[size];
for (int i = 0; i < tmp.length - 1; ++i) {
tmp[i] = r.nextDouble() * rem;
}
tmp[tmp.length - 1] = rem;
Arrays.sort(tmp);
final double[] ret = new double[size];
ret[0] = tmp[0] + min;
for (int i = 1; i < size; ++i) {
ret[i] = tmp[i] - tmp[i - 1] + min;
}
return ret;
}
/**
* Generate random task data for a synthetic job.
*/
static | DebugJobProducer |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/main/java/io/github/resilience4j/retry/autoconfigure/RetryConfigurationOnMissingBean.java | {
"start": 1127,
"end": 1805
} | class ____ extends AbstractRetryConfigurationOnMissingBean {
/**
* The EventConsumerRegistry is used to manage EventConsumer instances. The
* EventConsumerRegistry is used by the Retry events monitor to show the latest RetryEvent
* events for each Retry instance.
*
* @return a default EventConsumerRegistry {@link DefaultEventConsumerRegistry}
*/
@Bean
@ConditionalOnMissingBean(value = RetryEvent.class, parameterizedContainer = EventConsumerRegistry.class)
public EventConsumerRegistry<RetryEvent> retryEventConsumerRegistry() {
return retryConfiguration.retryEventConsumerRegistry();
}
}
| RetryConfigurationOnMissingBean |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/annotation/Configurations.java | {
"start": 2312,
"end": 4724
} | class ____ {
private static final Comparator<Object> COMPARATOR = OrderComparator.INSTANCE
.thenComparing((other) -> other.getClass().getName());
private final @Nullable UnaryOperator<Collection<Class<?>>> sorter;
private final Set<Class<?>> classes;
private final @Nullable Function<Class<?>, String> beanNameGenerator;
/**
* Create a new {@link Configurations} instance.
* @param classes the configuration classes
*/
protected Configurations(Collection<Class<?>> classes) {
Assert.notNull(classes, "'classes' must not be null");
this.sorter = null;
this.classes = Collections.unmodifiableSet(new LinkedHashSet<>(classes));
this.beanNameGenerator = null;
}
/**
* Create a new {@link Configurations} instance.
* @param sorter a {@link UnaryOperator} used to sort the configurations
* @param classes the configuration classes
* @param beanNameGenerator an optional function used to generate the bean name
* @since 3.4.0
*/
protected Configurations(@Nullable UnaryOperator<Collection<Class<?>>> sorter, Collection<Class<?>> classes,
@Nullable Function<Class<?>, String> beanNameGenerator) {
Assert.notNull(classes, "'classes' must not be null");
this.sorter = (sorter != null) ? sorter : UnaryOperator.identity();
Collection<Class<?>> sorted = this.sorter.apply(classes);
this.classes = Collections.unmodifiableSet(new LinkedHashSet<>(sorted));
this.beanNameGenerator = beanNameGenerator;
}
protected final Set<Class<?>> getClasses() {
return this.classes;
}
/**
* Merge configurations from another source of the same type.
* @param other the other {@link Configurations} (must be of the same type as this
* instance)
* @return a new configurations instance (must be of the same type as this instance)
*/
protected Configurations merge(Configurations other) {
Set<Class<?>> mergedClasses = new LinkedHashSet<>(getClasses());
mergedClasses.addAll(other.getClasses());
if (this.sorter != null) {
mergedClasses = new LinkedHashSet<>(this.sorter.apply(mergedClasses));
}
return merge(mergedClasses);
}
/**
* Merge configurations.
* @param mergedClasses the merged classes
* @return a new configurations instance (must be of the same type as this instance)
*/
protected abstract Configurations merge(Set<Class<?>> mergedClasses);
/**
* Return the bean name that should be used for the given configuration | Configurations |
java | apache__logging-log4j2 | log4j-jakarta-web/src/test/java/org/apache/logging/log4j/web/Log4jServletContainerInitializerTest.java | {
"start": 2065,
"end": 9628
} | class ____ {
@Mock
private ServletContext servletContext;
@Mock
private Log4jWebLifeCycle initializer;
@Captor
private ArgumentCaptor<Class<? extends Filter>> filterCaptor;
@Captor
private ArgumentCaptor<EventListener> listenerCaptor;
private Log4jServletContainerInitializer containerInitializer;
@BeforeEach
void setUp() {
this.containerInitializer = new Log4jServletContainerInitializer();
}
@Test
void testOnStartupWithServletVersion2_x() throws Exception {
given(servletContext.getMajorVersion()).willReturn(2);
this.containerInitializer.onStartup(null, this.servletContext);
}
@Test
void testOnStartupWithServletVersion3_xEffectiveVersion2_x() throws Exception {
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(2);
this.containerInitializer.onStartup(null, this.servletContext);
}
@Test
void testOnStartupWithServletVersion3_xEffectiveVersion3_xDisabledTrue() throws Exception {
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(3);
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_INITIALIZATION_DISABLED)))
.willReturn("true");
this.containerInitializer.onStartup(null, this.servletContext);
}
@Test
void testOnStartupWithServletVersion3_xEffectiveVersion3_xShutdownDisabled() throws Exception {
final FilterRegistration.Dynamic registration = mock(FilterRegistration.Dynamic.class);
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(3);
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_SHUTDOWN_DISABLED)))
.willReturn("true");
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_INITIALIZATION_DISABLED)))
.willReturn(null);
given(servletContext.addFilter(eq("log4jServletFilter"), filterCaptor.capture()))
.willReturn(registration);
given(servletContext.getAttribute(Log4jWebSupport.SUPPORT_ATTRIBUTE)).willReturn(initializer);
this.containerInitializer.onStartup(null, this.servletContext);
then(initializer).should().start();
then(initializer).should().setLoggerContext();
then(registration).should().setAsyncSupported(eq(true));
then(registration)
.should()
.addMappingForUrlPatterns(eq(EnumSet.allOf(DispatcherType.class)), eq(false), eq("/*"));
// initParam IS_LOG4J_AUTO_SHUTDOWN_DISABLED is "true" so addListener shouldn't be called.
then(servletContext).should(never()).addListener(any(Log4jServletContextListener.class));
}
@Test
void testOnStartupWithServletVersion3_xEffectiveVersion3_xDisabledTRUE() throws Exception {
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(3);
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_INITIALIZATION_DISABLED)))
.willReturn("TRUE");
this.containerInitializer.onStartup(null, this.servletContext);
}
@Test
void testOnStartupWithServletVersion3_xEffectiveVersion3_x() throws Exception {
final FilterRegistration.Dynamic registration = mock(FilterRegistration.Dynamic.class);
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(3);
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_INITIALIZATION_DISABLED)))
.willReturn(null);
given(servletContext.addFilter(eq("log4jServletFilter"), filterCaptor.capture()))
.willReturn(registration);
given(servletContext.getAttribute(Log4jWebSupport.SUPPORT_ATTRIBUTE)).willReturn(initializer);
containerInitializer.onStartup(null, servletContext);
then(initializer).should().start();
then(initializer).should().setLoggerContext();
then(servletContext).should().addListener(listenerCaptor.capture());
then(registration).should().setAsyncSupported(eq(true));
then(registration)
.should()
.addMappingForUrlPatterns(eq(EnumSet.allOf(DispatcherType.class)), eq(false), eq("/*"));
assertNotNull(listenerCaptor.getValue(), "The listener should not be null.");
assertSame(
Log4jServletContextListener.class,
listenerCaptor.getValue().getClass(),
"The listener is not correct.");
assertNotNull(filterCaptor.getValue(), "The filter should not be null.");
assertSame(Log4jServletFilter.class, filterCaptor.getValue(), "The filter is not correct.");
}
@Test
void testOnStartupCanceledDueToPreExistingFilter() throws Exception {
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(3);
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_INITIALIZATION_DISABLED)))
.willReturn("false");
given(servletContext.addFilter(eq("log4jServletFilter"), filterCaptor.capture()))
.willReturn(null);
this.containerInitializer.onStartup(null, this.servletContext);
assertNotNull(filterCaptor.getValue(), "The filter should not be null.");
assertSame(Log4jServletFilter.class, filterCaptor.getValue(), "The filter is not correct.");
}
@Test
void testOnStartupFailedDueToInitializerFailure() throws Exception {
final FilterRegistration.Dynamic registration = mock(FilterRegistration.Dynamic.class);
final IllegalStateException exception = new IllegalStateException(Strings.EMPTY);
given(servletContext.getMajorVersion()).willReturn(3);
given(servletContext.getEffectiveMajorVersion()).willReturn(3);
given(servletContext.getInitParameter(eq(Log4jWebSupport.IS_LOG4J_AUTO_INITIALIZATION_DISABLED)))
.willReturn("balderdash");
given(servletContext.addFilter(eq("log4jServletFilter"), filterCaptor.capture()))
.willReturn(registration);
given(servletContext.getAttribute(Log4jWebSupport.SUPPORT_ATTRIBUTE)).willReturn(initializer);
willThrow(exception).given(initializer).start();
try {
this.containerInitializer.onStartup(null, this.servletContext);
fail("Expected the exception thrown by the initializer; got no exception.");
} catch (final IllegalStateException e) {
assertSame(exception, e, "The exception is not correct.");
}
then(initializer).should().start();
assertNotNull(filterCaptor.getValue(), "The filter should not be null.");
assertSame(Log4jServletFilter.class, filterCaptor.getValue(), "The filter is not correct.");
}
@Test
void testServiceIsDetected() {
final ServiceLoader<ServletContainerInitializer> loader = ServiceLoader.load(ServletContainerInitializer.class);
final boolean found = StreamSupport.stream(loader.spliterator(), false)
.anyMatch(s -> s instanceof Log4jServletContainerInitializer);
assertTrue(found, "ServletContainerInitializer not found.");
}
}
| Log4jServletContainerInitializerTest |
java | apache__rocketmq | test/src/test/java/org/apache/rocketmq/test/autoswitchrole/AutoSwitchRoleBase.java | {
"start": 2076,
"end": 8765
} | class ____ {
protected static final String STORE_PATH_ROOT_PARENT_DIR = System.getProperty("user.home") + File.separator +
UUID.randomUUID().toString().replace("-", "");
private static final String STORE_PATH_ROOT_DIR = STORE_PATH_ROOT_PARENT_DIR + File.separator + "store";
private static final String STORE_MESSAGE = "Once, there was a chance for me!";
private static final byte[] MESSAGE_BODY = STORE_MESSAGE.getBytes();
protected static List<BrokerController> brokerList;
private static SocketAddress bornHost;
private static SocketAddress storeHost;
private static int number = 0;
protected static void initialize() {
brokerList = new ArrayList<>();
try {
storeHost = new InetSocketAddress(InetAddress.getLocalHost(), 8123);
bornHost = new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 0);
} catch (Exception ignored) {
}
}
public static int nextPort() throws IOException {
return nextPort(1001, 9999);
}
public static int nextPort(int minPort, int maxPort) throws IOException {
Random random = new Random();
int tempPort;
int port;
while (true) {
try {
tempPort = random.nextInt(maxPort) % (maxPort - minPort + 1) + minPort;
ServerSocket serverSocket = new ServerSocket(tempPort);
port = serverSocket.getLocalPort();
serverSocket.close();
break;
} catch (IOException ignored) {
if (number > 200) {
throw new IOException("This server's open ports are temporarily full!");
}
++number;
}
}
number = 0;
return port;
}
public BrokerController startBroker(String namesrvAddress, String controllerAddress, String brokerName,
int brokerId, int haPort,
int brokerListenPort,
int nettyListenPort, BrokerRole expectedRole, int mappedFileSize) throws Exception {
final MessageStoreConfig storeConfig = buildMessageStoreConfig(brokerName + "#" + brokerId, haPort, mappedFileSize);
storeConfig.setHaMaxTimeSlaveNotCatchup(3 * 1000);
final BrokerConfig brokerConfig = new BrokerConfig();
brokerConfig.setListenPort(brokerListenPort);
brokerConfig.setNamesrvAddr(namesrvAddress);
brokerConfig.setControllerAddr(controllerAddress);
brokerConfig.setSyncBrokerMetadataPeriod(2 * 1000);
brokerConfig.setCheckSyncStateSetPeriod(2 * 1000);
brokerConfig.setBrokerName(brokerName);
brokerConfig.setEnableControllerMode(true);
final NettyServerConfig nettyServerConfig = new NettyServerConfig();
nettyServerConfig.setListenPort(nettyListenPort);
final BrokerController brokerController = new BrokerController(brokerConfig, nettyServerConfig, new NettyClientConfig(), storeConfig);
assertTrue(brokerController.initialize());
brokerController.start();
brokerList.add(brokerController);
await().atMost(20, TimeUnit.SECONDS).until(() -> (expectedRole == BrokerRole.SYNC_MASTER) == brokerController.getReplicasManager().isMasterState());
return brokerController;
}
protected MessageStoreConfig buildMessageStoreConfig(final String brokerDir, final int haPort,
final int mappedFileSize) {
MessageStoreConfig storeConfig = new MessageStoreConfig();
storeConfig.setHaSendHeartbeatInterval(1000);
storeConfig.setBrokerRole(BrokerRole.SLAVE);
storeConfig.setHaListenPort(haPort);
storeConfig.setStorePathRootDir(STORE_PATH_ROOT_DIR + File.separator + brokerDir);
storeConfig.setStorePathCommitLog(STORE_PATH_ROOT_DIR + File.separator + brokerDir + File.separator + "commitlog");
storeConfig.setStorePathEpochFile(STORE_PATH_ROOT_DIR + File.separator + brokerDir + File.separator + "EpochFileCache");
storeConfig.setStorePathBrokerIdentity(STORE_PATH_ROOT_DIR + File.separator + brokerDir + File.separator + "brokerIdentity");
storeConfig.setTotalReplicas(3);
storeConfig.setInSyncReplicas(2);
storeConfig.setMappedFileSizeCommitLog(mappedFileSize);
storeConfig.setMappedFileSizeConsumeQueue(1024 * 1024);
storeConfig.setMaxHashSlotNum(10000);
storeConfig.setMaxIndexNum(100 * 100);
storeConfig.setFlushDiskType(FlushDiskType.SYNC_FLUSH);
storeConfig.setFlushIntervalConsumeQueue(1);
return storeConfig;
}
protected static ControllerConfig buildControllerConfig(final String id, final String peers) {
final ControllerConfig config = new ControllerConfig();
config.setControllerDLegerGroup("group1");
config.setControllerDLegerPeers(peers);
config.setControllerDLegerSelfId(id);
config.setMappedFileSize(1024 * 1024);
config.setControllerStorePath(STORE_PATH_ROOT_DIR + File.separator + "namesrv" + id + File.separator + "DLedgerController");
return config;
}
protected MessageExtBrokerInner buildMessage(String topic) {
MessageExtBrokerInner msg = new MessageExtBrokerInner();
msg.setTopic(topic);
msg.setTags("TAG1");
msg.setBody(MESSAGE_BODY);
msg.setKeys(String.valueOf(System.currentTimeMillis()));
msg.setQueueId(0);
msg.setSysFlag(0);
msg.setBornTimestamp(System.currentTimeMillis());
msg.setStoreHost(storeHost);
msg.setBornHost(bornHost);
msg.setPropertiesString(MessageDecoder.messageProperties2String(msg.getProperties()));
return msg;
}
protected void putMessage(MessageStore messageStore, String topic) {
// Put message on master
for (int i = 0; i < 10; i++) {
assertSame(messageStore.putMessage(buildMessage(topic)).getPutMessageStatus(), PutMessageStatus.PUT_OK);
}
}
protected void checkMessage(final MessageStore messageStore, String topic, int totalNums, int startOffset) {
await().atMost(30, TimeUnit.SECONDS)
.until(() -> {
GetMessageResult result = messageStore.getMessage("GROUP_A", topic, 0, startOffset, 1024, null);
// System.out.printf(result + "%n");
// System.out.printf("maxPhyOffset=" + messageStore.getMaxPhyOffset() + "%n");
// System.out.printf("confirmOffset=" + messageStore.getConfirmOffset() + "%n");
return result != null && result.getStatus() == GetMessageStatus.FOUND && result.getMessageCount() >= totalNums;
});
}
}
| AutoSwitchRoleBase |
java | playframework__playframework | core/play/src/main/java/play/http/HttpErrorHandler.java | {
"start": 1258,
"end": 1431
} | class ____ {
public static final TypedKey<HttpErrorInfo> HTTP_ERROR_INFO =
new TypedKey<>(play.api.http.HttpErrorHandler.Attrs$.MODULE$.HttpErrorInfo());
}
}
| Attrs |
java | apache__logging-log4j2 | log4j-api-test/src/test/java/org/apache/logging/log4j/status/StatusLoggerTest.java | {
"start": 1148,
"end": 1994
} | class ____ {
private final PrintStream origOut = System.out;
private final PrintStream origErr = System.err;
private ByteArrayOutputStream outBuf;
private ByteArrayOutputStream errBuf;
@BeforeEach
void setupStreams() {
outBuf = new ByteArrayOutputStream();
errBuf = new ByteArrayOutputStream();
System.setOut(new PrintStream(outBuf));
System.setErr(new PrintStream(errBuf));
}
@AfterEach
void resetStreams() {
System.setOut(origOut);
System.setErr(origErr);
}
@Test
void status_logger_writes_to_stderr_by_default() {
StatusLogger statusLogger = new StatusLogger();
statusLogger.error("Test message");
assertEquals("", outBuf.toString());
assertThat(errBuf.toString()).contains("Test message");
}
}
| StatusLoggerTest |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java | {
"start": 6452,
"end": 6858
} | class ____ { ",
" public void foo() {",
" int i;",
" i = - 1;",
" }",
"}");
writeFile("A.java", fileContent);
assertCompiles(literalExpressionMatches(literalHasStartPosition(59)));
}
@Test
public void getStartPositionWithWhitespaceWindows() {
String fileContent =
WINDOWS_LINE_JOINER.join(
"public | A |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/JSR166TestCase.java | {
"start": 33220,
"end": 33377
} | class ____ implements ThreadFactory {
@Override
public Thread newThread(Runnable r) {
return new Thread(r);
}
}
public | SimpleThreadFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/BootstrapTest.java | {
"start": 10221,
"end": 17428
} | class ____ JPA/Hibernate annotations for mapping.
// differs from above in that accessing the Class is deferred which is
// important if using runtime bytecode-enhancement
sources.addAnnotatedClassName("org.hibernate.example.Customer");
// Read package-level metadata.
sources.addPackage("hibernate.example");
// Read package-level metadata.
sources.addPackage(MyEntity.class.getPackage());
// Adds the named hbm.xml resource as a source: which performs the
// classpath lookup and parses the XML
sources.addResource("org/hibernate/example/Order.hbm.xml");
// Adds the named JPA orm.xml resource as a source: which performs the
// classpath lookup and parses the XML
sources.addResource("org/hibernate/example/Product.orm.xml");
// Read all mapping documents from a directory tree.
// Assumes that any file named *.hbm.xml is a mapping document.
sources.addDirectory(new File("."));
// Read mappings from a particular XML file
sources.addFile(new File("./mapping.xml"));
// Read all mappings from a jar file.
// Assumes that any file named *.hbm.xml is a mapping document.
sources.addJar(new File("./entities.jar"));
//end::bootstrap-bootstrap-native-registry-MetadataSources-example[]
}
catch (Exception ignore) {
}
}
@Test
public void test_bootstrap_bootstrap_native_metadata_source_example() {
try {
{
//tag::bootstrap-native-metadata-source-example[]
ServiceRegistry standardRegistry =
new StandardServiceRegistryBuilder().build();
MetadataSources sources = new MetadataSources(standardRegistry)
.addAnnotatedClass(MyEntity.class)
.addAnnotatedClassName("org.hibernate.example.Customer")
.addResource("org/hibernate/example/Order.hbm.xml")
.addResource("org/hibernate/example/Product.orm.xml");
//end::bootstrap-native-metadata-source-example[]
}
{
AttributeConverter myAttributeConverter = new AttributeConverter() {
@Override
public Object convertToDatabaseColumn(Object attribute) {
return null;
}
@Override
public Object convertToEntityAttribute(Object dbData) {
return null;
}
} ;
//tag::bootstrap-native-metadata-builder-example[]
ServiceRegistry standardRegistry =
new StandardServiceRegistryBuilder().build();
MetadataSources sources = new MetadataSources(standardRegistry);
MetadataBuilder metadataBuilder = sources.getMetadataBuilder();
// Use the JPA-compliant implicit naming strategy
metadataBuilder.applyImplicitNamingStrategy(
ImplicitNamingStrategyJpaCompliantImpl.INSTANCE);
// specify the schema name to use for tables, etc when none is explicitly specified
metadataBuilder.applyImplicitSchemaName("my_default_schema");
// specify a custom Attribute Converter
metadataBuilder.applyAttributeConverter(myAttributeConverter);
Metadata metadata = metadataBuilder.build();
//end::bootstrap-native-metadata-builder-example[]
}
}
catch (Exception ignore) {
}
}
@Test
public void test_bootstrap_bootstrap_native_SessionFactory_example() {
try {
{
//tag::bootstrap-native-SessionFactory-example[]
StandardServiceRegistry standardRegistry = new StandardServiceRegistryBuilder()
.configure("org/hibernate/example/hibernate.cfg.xml")
.build();
Metadata metadata = new MetadataSources(standardRegistry)
.addAnnotatedClass(MyEntity.class)
.addAnnotatedClassName("org.hibernate.example.Customer")
.addResource("org/hibernate/example/Order.hbm.xml")
.addResource("org/hibernate/example/Product.orm.xml")
.getMetadataBuilder()
.applyImplicitNamingStrategy(ImplicitNamingStrategyJpaCompliantImpl.INSTANCE)
.build();
SessionFactory sessionFactory = metadata.getSessionFactoryBuilder()
.applyBeanManager(getBeanManager())
.build();
sessionFactory.close();
//end::bootstrap-native-SessionFactory-example[]
}
{
//tag::bootstrap-native-SessionFactoryBuilder-example[]
StandardServiceRegistry standardRegistry = new StandardServiceRegistryBuilder()
.configure("org/hibernate/example/hibernate.cfg.xml")
.build();
Metadata metadata = new MetadataSources(standardRegistry)
.addAnnotatedClass(MyEntity.class)
.addAnnotatedClassName("org.hibernate.example.Customer")
.addResource("org/hibernate/example/Order.hbm.xml")
.addResource("org/hibernate/example/Product.orm.xml")
.getMetadataBuilder()
.applyImplicitNamingStrategy(ImplicitNamingStrategyJpaCompliantImpl.INSTANCE)
.build();
SessionFactoryBuilder sessionFactoryBuilder = metadata.getSessionFactoryBuilder();
// Supply a SessionFactory-level Interceptor
sessionFactoryBuilder.applyInterceptor(new CustomSessionFactoryInterceptor());
// Add a custom observer
sessionFactoryBuilder.addSessionFactoryObservers(new CustomSessionFactoryObserver());
// Apply a CDI BeanManager (for JPA event listeners)
sessionFactoryBuilder.applyBeanManager(getBeanManager());
SessionFactory sessionFactory = sessionFactoryBuilder.build();
//end::bootstrap-native-SessionFactoryBuilder-example[]
sessionFactory.close();
}
}
catch (Exception ignore) {
}
}
@Test
public void test_bootstrap_bootstrap_jpa_compliant_EntityManagerFactory_example() {
try {
//tag::bootstrap-jpa-compliant-EntityManagerFactory-example[]
// Create an EMF for our CRM persistence-unit.
EntityManagerFactory emf = Persistence.createEntityManagerFactory("CRM");
//end::bootstrap-jpa-compliant-EntityManagerFactory-example[]
} catch (Exception ignore) {}
}
@Test
public void test_bootstrap_bootstrap_native_EntityManagerFactory_example() {
try {
//tag::bootstrap-native-EntityManagerFactory-example[]
String persistenceUnitName = "CRM";
List<String> entityClassNames = new ArrayList<>();
Properties properties = new Properties();
PersistenceUnitInfoImpl persistenceUnitInfo = new PersistenceUnitInfoImpl(
persistenceUnitName,
entityClassNames,
properties
);
Map<String, Object> integrationSettings = ServiceRegistryUtil.createBaseSettings();
integrationSettings.put(
AvailableSettings.INTERCEPTOR,
new CustomSessionFactoryInterceptor()
);
EntityManagerFactoryBuilderImpl entityManagerFactoryBuilder =
new EntityManagerFactoryBuilderImpl(
new PersistenceUnitInfoDescriptor(persistenceUnitInfo),
integrationSettings
);
EntityManagerFactory emf = entityManagerFactoryBuilder.build();
//end::bootstrap-native-EntityManagerFactory-example[]
}
catch (Exception ignore) {
}
}
@Test
@JiraKey("HHH-17154")
public void build_EntityManagerFactory_with_NewTempClassLoader() {
new EntityManagerFactoryBuilderImpl(
new PersistenceUnitInfoDescriptor(
new PersistenceUnitInfoImpl( "", new ArrayList<>(), new Properties() ) {
@Override
public ClassLoader getNewTempClassLoader() {
return Thread.currentThread().getContextClassLoader();
}
}
),
ServiceRegistryUtil.createBaseSettings()
).cancel();
}
public Object getBeanManager() {
return null;
}
@Entity
public static | using |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeResponse.java | {
"start": 465,
"end": 738
} | class ____ extends OpenIndexResponse {
public FreezeResponse(StreamInput in) throws IOException {
super(in);
}
public FreezeResponse(boolean acknowledged, boolean shardsAcknowledged) {
super(acknowledged, shardsAcknowledged);
}
}
| FreezeResponse |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/XmlsecuritySignComponentBuilderFactory.java | {
"start": 1937,
"end": 10082
} | interface ____ extends ComponentBuilder<XmlSignerComponent> {
/**
* In order to protect the KeyInfo element from tampering you can add a
* reference to the signed info element so that it is protected via the
* signature value. The default value is true. Only relevant when a
* KeyInfo is returned by KeyAccessor. and KeyInfo#getId() is not null.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param addKeyInfoReference the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder addKeyInfoReference(java.lang.Boolean addKeyInfoReference) {
doSetProperty("addKeyInfoReference", addKeyInfoReference);
return this;
}
/**
* You can set a base URI which is used in the URI dereferencing.
* Relative URIs are then concatenated with the base URI.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param baseUri the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder baseUri(java.lang.String baseUri) {
doSetProperty("baseUri", baseUri);
return this;
}
/**
* Canonicalization method used to canonicalize the SignedInfo element
* before the digest is calculated. You can use the helper methods
* XmlSignatureHelper.getCanonicalizationMethod(String algorithm) or
* getCanonicalizationMethod(String algorithm, List
* inclusiveNamespacePrefixes) to create a canonicalization method.
*
* The option is a:
* <code>javax.xml.crypto.AlgorithmMethod</code> type.
*
* Default: http://www.w3.org/TR/2001/REC-xml-c14n-20010315
* Group: producer
*
* @param canonicalizationMethod the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder canonicalizationMethod(javax.xml.crypto.AlgorithmMethod canonicalizationMethod) {
doSetProperty("canonicalizationMethod", canonicalizationMethod);
return this;
}
/**
* Determines if the XML signature specific headers be cleared after
* signing and verification. Defaults to true.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param clearHeaders the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder clearHeaders(java.lang.Boolean clearHeaders) {
doSetProperty("clearHeaders", clearHeaders);
return this;
}
/**
* Sets the content object Id attribute value. By default a UUID is
* generated. If you set the null value, then a new UUID will be
* generated. Only used in the enveloping case.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param contentObjectId the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder contentObjectId(java.lang.String contentObjectId) {
doSetProperty("contentObjectId", contentObjectId);
return this;
}
/**
* Type of the content reference. The default value is null. This value
* can be overwritten by the header
* XmlSignatureConstants#HEADER_CONTENT_REFERENCE_TYPE.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param contentReferenceType the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder contentReferenceType(java.lang.String contentReferenceType) {
doSetProperty("contentReferenceType", contentReferenceType);
return this;
}
/**
* Reference URI for the content to be signed. Only used in the
* enveloped case. If the reference URI contains an ID attribute value,
* then the resource schema URI ( setSchemaResourceUri(String)) must
* also be set because the schema validator will then find out which
* attributes are ID attributes. Will be ignored in the enveloping or
* detached case.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param contentReferenceUri the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder contentReferenceUri(java.lang.String contentReferenceUri) {
doSetProperty("contentReferenceUri", contentReferenceUri);
return this;
}
/**
* Sets the crypto context properties. See {link
* XMLCryptoContext#setProperty(String, Object)}. Possible properties
* are defined in XMLSignContext an XMLValidateContext (see Supported
* Properties). The following properties are set by default to the value
* Boolean#TRUE for the XML validation. If you want to switch these
* features off you must set the property value to Boolean#FALSE.
* org.jcp.xml.dsig.validateManifests
* javax.xml.crypto.dsig.cacheReference.
*
* The option is a: <code>java.util.Map&lt;java.lang.String,
* java.lang.Object&gt;</code> type.
*
* Group: producer
*
* @param cryptoContextProperties the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder cryptoContextProperties(java.util.Map<java.lang.String, java.lang.Object> cryptoContextProperties) {
doSetProperty("cryptoContextProperties", cryptoContextProperties);
return this;
}
/**
* Digest algorithm URI. Optional parameter. This digest algorithm is
* used for calculating the digest of the input message. If this digest
* algorithm is not specified then the digest algorithm is calculated
* from the signature algorithm. Example:
* http://www.w3.org/2001/04/xmlenc#sha256.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param digestAlgorithm the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder digestAlgorithm(java.lang.String digestAlgorithm) {
doSetProperty("digestAlgorithm", digestAlgorithm);
return this;
}
/**
* Disallows that the incoming XML document contains DTD DOCTYPE
* declaration. The default value is Boolean#TRUE.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param disallowDoctypeDecl the value to set
* @return the dsl builder
*/
default XmlsecuritySignComponentBuilder disallowDoctypeDecl(java.lang.Boolean disallowDoctypeDecl) {
doSetProperty("disallowDoctypeDecl", disallowDoctypeDecl);
return this;
}
/**
* For the signing process, a private key is necessary. You specify a
* key accessor bean which provides this private key. The key accessor
* bean must implement the KeyAccessor interface. The package
* org.apache.camel.component.xmlsecurity.api contains the default
* implementation | XmlsecuritySignComponentBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/gateway/MetadataStateFormat.java | {
"start": 13680,
"end": 23335
} | class ____ theirs specific state.
*/
public abstract T fromXContent(XContentParser parser) throws IOException;
/**
* Reads the state from a given file and compares the expected version against the actual version of
* the state.
*/
public final T read(NamedXContentRegistry namedXContentRegistry, Path file) throws IOException {
try (Directory dir = newDirectory(file.getParent())) {
try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
// We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
CodecUtil.checksumEntireFile(indexInput);
final int format = CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, CURRENT_VERSION);
final XContentType xContentType;
if (format < LE_VERSION) {
xContentType = XContentType.values()[Integer.reverseBytes(indexInput.readInt())];
} else {
xContentType = XContentType.values()[indexInput.readInt()];
}
if (xContentType != FORMAT) {
throw new IllegalStateException("expected state in " + file + " to be " + FORMAT + " format but was " + xContentType);
}
long filePointer = indexInput.getFilePointer();
long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
try (
XContentParser parser = XContentFactory.xContent(FORMAT)
.createParser(
namedXContentRegistry,
LoggingDeprecationHandler.INSTANCE,
new InputStreamIndexInput(slice, contentSize)
)
) {
return fromXContent(parser);
}
}
} catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
// we trick this into a dedicated exception with the original stacktrace
throw new CorruptStateException(ex);
}
}
}
protected Directory newDirectory(Path dir) throws IOException {
return new NIOFSDirectory(dir);
}
/**
* Clean ups all state files not matching passed generation.
*
* @param currentGeneration state generation to keep.
* @param locations state paths.
*/
public void cleanupOldFiles(final long currentGeneration, Path[] locations) {
final String fileNameToKeep = getStateFileName(currentGeneration);
for (Path location : locations) {
logger.trace("cleanupOldFiles: cleaning up {}", location);
Path stateLocation = location.resolve(STATE_DIR_NAME);
try (Directory stateDir = newDirectory(stateLocation)) {
for (String file : stateDir.listAll()) {
if (file.startsWith(prefix) && file.equals(fileNameToKeep) == false) {
deleteFileIgnoreExceptions(stateLocation, stateDir, file);
}
}
} catch (Exception e) {
logger.trace("clean up failed for state location {}", stateLocation);
}
}
}
/**
* Finds state file with maximum id.
*
* @param prefix - filename prefix
* @param locations - paths to directories with state folder
* @return maximum id of state file or -1 if no such files are found
* @throws IOException if IOException occurs
*/
long findMaxGenerationId(final String prefix, Path... locations) throws IOException {
long maxId = -1;
for (Path dataLocation : locations) {
final Path resolve = dataLocation.resolve(STATE_DIR_NAME);
if (Files.exists(resolve)) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(resolve, prefix + "*")) {
for (Path stateFile : stream) {
final Matcher matcher = stateFilePattern.matcher(stateFile.getFileName().toString());
if (matcher.matches()) {
final long id = Long.parseLong(matcher.group(1));
maxId = Math.max(maxId, id);
}
}
}
}
}
return maxId;
}
List<Path> findStateFilesByGeneration(final long generation, Path... locations) {
List<Path> files = new ArrayList<>();
if (generation == -1) {
return files;
}
final String fileName = getStateFileName(generation);
for (Path dataLocation : locations) {
final Path stateFilePath = dataLocation.resolve(STATE_DIR_NAME).resolve(fileName);
if (Files.exists(stateFilePath)) {
logger.trace("found state file: {}", stateFilePath);
files.add(stateFilePath);
}
}
return files;
}
public String getStateFileName(long generation) {
return prefix + generation + STATE_FILE_EXTENSION;
}
/**
* Tries to load the state of particular generation from the given data-locations. If any of data locations contain state files with
* given generation, state will be loaded from these state files.
*
* @param logger a logger instance.
* @param generation the generation to be loaded.
* @param dataLocations the data-locations to try.
* @return the state of asked generation or <code>null</code> if no state was found.
*/
public T loadGeneration(Logger logger, NamedXContentRegistry namedXContentRegistry, long generation, Path... dataLocations) {
List<Path> stateFiles = findStateFilesByGeneration(generation, dataLocations);
final List<Throwable> exceptions = new ArrayList<>();
for (Path stateFile : stateFiles) {
try {
T state = read(namedXContentRegistry, stateFile);
if (logger.isTraceEnabled()) {
logger.trace("generation id [{}] read from [{}]", generation, stateFile.getFileName());
}
return state;
} catch (Exception e) {
exceptions.add(new IOException("failed to read " + stateFile, e));
logger.debug(() -> format("%s: failed to read [%s], ignoring...", stateFile, prefix), e);
}
}
// if we reach this something went wrong
ExceptionsHelper.maybeThrowRuntimeAndSuppress(exceptions);
if (stateFiles.size() > 0) {
// We have some state files but none of them gave us a usable state
throw new IllegalStateException(
"Could not find a state file to recover from among "
+ stateFiles.stream().map(Object::toString).collect(Collectors.joining(", "))
);
}
return null;
}
/**
* Tries to load the latest state from the given data-locations.
*
* @param logger a logger instance.
* @param dataLocations the data-locations to try.
* @return tuple of the latest state and generation. (null, -1) if no state is found.
*/
public Tuple<T, Long> loadLatestStateWithGeneration(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... dataLocations)
throws IOException {
long generation = findMaxGenerationId(prefix, dataLocations);
T state = loadGeneration(logger, namedXContentRegistry, generation, dataLocations);
// It may not be possible to get into this state, if there's a bad state file the above
// call will throw ElasticsearchException. If there are no state files, we won't find a
// generation.
if (generation > -1 && state == null) {
throw new IllegalStateException(
"unable to find state files with generation id "
+ generation
+ " returned by findMaxGenerationId function, in data folders ["
+ Arrays.stream(dataLocations).map(Object::toString).collect(Collectors.joining(", "))
+ "], concurrent writes?"
);
}
return Tuple.tuple(state, generation);
}
/**
* Tries to load the latest state from the given data-locations.
*
* @param logger a logger instance.
* @param dataLocations the data-locations to try.
* @return the latest state or <code>null</code> if no state was found.
*/
@Nullable
public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... dataLocations) throws IOException {
return loadLatestStateWithGeneration(logger, namedXContentRegistry, dataLocations).v1();
}
/**
* Deletes all meta state directories recursively for the given data locations
* @param dataLocations the data location to delete
*/
public static void deleteMetaState(Path... dataLocations) throws IOException {
Path[] stateDirectories = new Path[dataLocations.length];
for (int i = 0; i < dataLocations.length; i++) {
stateDirectories[i] = dataLocations[i].resolve(STATE_DIR_NAME);
}
IOUtils.rm(stateDirectories);
}
public String getPrefix() {
return prefix;
}
}
| for |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/methodargs/CustomExecutor.java | {
"start": 185,
"end": 232
} | class ____ extends BaseExecutor {
}
| CustomExecutor |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java | {
"start": 32684,
"end": 34095
} | class ____ extends OptimizerExpressionRule<Case> {
SimplifyCase() {
super(TransformDirection.DOWN);
}
@Override
protected Expression rule(Case c) {
Expression e = c;
// Remove or foldable conditions that fold to FALSE
// Stop at the 1st foldable condition that folds to TRUE
List<IfConditional> newConditions = new ArrayList<>();
for (IfConditional conditional : c.conditions()) {
if (conditional.condition().foldable()) {
Boolean res = (Boolean) conditional.condition().fold();
if (res == Boolean.TRUE) {
newConditions.add(conditional);
break;
}
} else {
newConditions.add(conditional);
}
}
if (newConditions.size() < c.children().size()) {
e = c.replaceChildren(combine(newConditions, c.elseResult()));
}
return e;
}
}
/**
* Any numeric aggregates (avg, min, max, sum) acting on literals are converted to an iif(count(1)=0, null, literal*count(1)) for sum,
* and to iif(count(1)=0,null,literal) for the other three.
* Additionally count(DISTINCT literal) is converted to iif(count(1)=0, 0, 1).
*/
private static | SimplifyCase |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/task/ThreadPoolTaskSchedulerCustomizer.java | {
"start": 910,
"end": 1153
} | interface ____ {
/**
* Callback to customize a {@link ThreadPoolTaskScheduler} instance.
* @param taskScheduler the task scheduler to customize
*/
void customize(ThreadPoolTaskScheduler taskScheduler);
}
| ThreadPoolTaskSchedulerCustomizer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StaticMockMemberTest.java | {
"start": 2027,
"end": 2505
} | class ____ {
@Mock private String mockedPrivateString;
@Mock String mockedString;
}
""")
.doTest();
}
@Test
public void negativeCases() {
compilationHelper
.addSourceLines(
"in/Test.java",
"""
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
@RunWith(JUnit4.class)
public | Test |
java | quarkusio__quarkus | extensions/amazon-lambda/deployment/src/test/java/io/quarkus/amazon/lambda/deployment/testing/AbstractInputCollectionOutputCollectionLambdaImplTest.java | {
"start": 672,
"end": 1869
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class)
.addClasses(AbstractInputCollectionOutputCollectionLambdaImpl.class, AbstractInputCollectionOutputCollection.class,
InputPerson.class, OutputPerson.class));
@Test
void abstractRequestHandler_InputCollectionInputPerson_OutputCollectionOutputPerson() {
List<InputPerson> personList = new ArrayList<>();
personList.add(new InputPerson("Chris"));
personList.add(new InputPerson("Fred"));
given()
.body(personList)
.when()
.post()
.then()
.statusCode(200)
.body("", hasItem(hasEntry("outputname", "Chris"))) // OutputPerson serializes name with key outputname
.body("", hasItem(hasEntry("outputname", "Fred")))
.body("", not(hasItem(hasEntry("name", "Chris")))) // make sure that there is no key name
.body("", not(hasItem(hasEntry("name", "Fred"))));
}
}
| AbstractInputCollectionOutputCollectionLambdaImplTest |
java | spring-projects__spring-security | config/src/integration-test/java/org/springframework/security/config/annotation/authentication/ldap/LdapAuthenticationProviderBuilderSecurityBuilderTests.java | {
"start": 10791,
"end": 11257
} | class ____ extends BaseLdapProviderConfig {
@Bean
UnboundIdContainer ldapServer() throws Exception {
UnboundIdContainer unboundIdContainer = new UnboundIdContainer("dc=springframework,dc=org",
"classpath:/test-server.ldif");
unboundIdContainer.setPort(getPort());
return unboundIdContainer;
}
}
@Configuration
@EnableWebSecurity
@EnableGlobalAuthentication
@Import(ObjectPostProcessorConfiguration.class)
abstract static | BaseLdapServerConfig |
java | processing__processing4 | java/src/processing/mode/java/Compiler.java | {
"start": 7156,
"end": 8307
} | class ____'s built-in.");
handleCrustyCode();
} else {
exception.setMessage("The package " +
"\u201C" + m[1] + "\u201D" +
" does not exist. " +
"You might be missing a library.");
System.err.println("Libraries must be " +
"installed in a folder named 'libraries' " +
"inside the sketchbook folder " +
"(see the Preferences window).");
}
}
} else if (errorMessage.endsWith("cannot be resolved to a type")) {
// xxx cannot be resolved to a type
//xxx c;
String what = errorMessage.substring(0, errorMessage.indexOf(' '));
if (what.equals("BFont") ||
what.equals("BGraphics") ||
what.equals("BImage")) {
exception.setMessage(what + " has been replaced with P" + what.substring(1));
handleCrustyCode();
} else {
exception.setMessage("Cannot find a | that |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest12.java | {
"start": 837,
"end": 1181
} | class ____ extends TestCase {
public void test_true() throws Exception {
assertTrue(WallUtils.isValidateMySql(//
"SELECT a.* FROM lhwbbs_threads a " +
"WHERE 1 AND a.disabled=0 AND a.ifupload&1=1 " +
"ORDER BY a.created_time DESC LIMIT 0,100"));
}
}
| MySqlWallTest12 |
java | apache__camel | tooling/maven/camel-package-maven-plugin/src/it/HeaderSupport/src/main/java/org/apache/camel/component/foo/FooConstants.java | {
"start": 896,
"end": 1083
} | class ____ {
@Metadata(description = "My description of the SomeHeader")
public static final String SOME_HEADER = "SomeHeaderName";
private FooConstants() {
}
}
| FooConstants |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/env/ExplicitPropertiesFileTestPropertySourceTests.java | {
"start": 2564,
"end": 2766
} | class ____ extends AbstractExplicitPropertiesFileTests {
}
@Nested
@DisplayName("with relative paths")
@TestPropertySource("../env/../env/../../context/env/explicit.properties")
| RelativePathTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/metrics/groups/InternalSourceSplitMetricGroup.java | {
"start": 1572,
"end": 7748
} | class ____ extends ProxyMetricGroup<MetricGroup>
implements SourceSplitMetricGroup {
static final Logger LOG = LoggerFactory.getLogger(InternalSourceSplitMetricGroup.class);
private final TimerGauge pausedTimePerSecond;
private final TimerGauge idleTimePerSecond;
private final Gauge<Long> currentWatermarkGauge;
private final Clock clock;
private static final String SPLIT = "split";
private static final String WATERMARK = "watermark";
private static final long SPLIT_NOT_STARTED = -1L;
private long splitStartTime = SPLIT_NOT_STARTED;
private final MetricGroup splitWatermarkMetricGroup;
private InternalSourceSplitMetricGroup(
MetricGroup parentMetricGroup,
Clock clock,
String splitId,
Gauge<Long> currentWatermark) {
super(parentMetricGroup);
this.clock = clock;
splitWatermarkMetricGroup = parentMetricGroup.addGroup(SPLIT, splitId).addGroup(WATERMARK);
pausedTimePerSecond =
splitWatermarkMetricGroup.gauge(
MetricNames.SPLIT_PAUSED_TIME, new TimerGauge(clock));
idleTimePerSecond =
splitWatermarkMetricGroup.gauge(MetricNames.SPLIT_IDLE_TIME, new TimerGauge(clock));
splitWatermarkMetricGroup.gauge(
MetricNames.SPLIT_ACTIVE_TIME, this::getActiveTimePerSecond);
splitWatermarkMetricGroup.gauge(
MetricNames.ACC_SPLIT_PAUSED_TIME, this::getAccumulatedPausedTime);
splitWatermarkMetricGroup.gauge(
MetricNames.ACC_SPLIT_ACTIVE_TIME, this::getAccumulatedActiveTime);
splitWatermarkMetricGroup.gauge(
MetricNames.ACC_SPLIT_IDLE_TIME, this::getAccumulatedIdleTime);
currentWatermarkGauge =
splitWatermarkMetricGroup.gauge(
MetricNames.SPLIT_CURRENT_WATERMARK, currentWatermark);
}
public static InternalSourceSplitMetricGroup wrap(
OperatorMetricGroup operatorMetricGroup, String splitId, Gauge<Long> currentWatermark) {
return new InternalSourceSplitMetricGroup(
operatorMetricGroup, SystemClock.getInstance(), splitId, currentWatermark);
}
@VisibleForTesting
public static InternalSourceSplitMetricGroup mock(
MetricGroup metricGroup, String splitId, Gauge<Long> currentWatermark) {
return new InternalSourceSplitMetricGroup(
metricGroup, SystemClock.getInstance(), splitId, currentWatermark);
}
@VisibleForTesting
public static InternalSourceSplitMetricGroup wrap(
OperatorMetricGroup operatorMetricGroup,
Clock clock,
String splitId,
Gauge<Long> currentWatermark) {
return new InternalSourceSplitMetricGroup(
operatorMetricGroup, clock, splitId, currentWatermark);
}
public void markSplitStart() {
splitStartTime = clock.absoluteTimeMillis();
}
public void maybeMarkSplitStart() {
if (splitStartTime == SPLIT_NOT_STARTED) {
markSplitStart();
}
}
public long getCurrentWatermark() {
return this.currentWatermarkGauge.getValue();
}
public void markPaused() {
maybeMarkSplitStart();
if (isIdle()) {
// If a split got paused it means it emitted records,
// hence it shouldn't be considered idle anymore
markNotIdle();
LOG.warn("Split marked paused while still idle");
}
this.pausedTimePerSecond.markStart();
}
public void markIdle() {
maybeMarkSplitStart();
if (isPaused()) {
// If a split is marked idle, it has no records to emit.
// hence it shouldn't be considered paused anymore
markNotPaused();
LOG.warn("Split marked idle while still paused");
}
this.idleTimePerSecond.markStart();
}
public void markNotPaused() {
maybeMarkSplitStart();
this.pausedTimePerSecond.markEnd();
}
public void markNotIdle() {
maybeMarkSplitStart();
this.idleTimePerSecond.markEnd();
}
public double getActiveTimePerSecond() {
if (splitStartTime == SPLIT_NOT_STARTED) {
return 0L;
}
double activeTimePerSecond = 1000.0 - getPausedTimePerSecond() - getIdleTimePerSecond();
return Math.max(activeTimePerSecond, 0);
}
public double getAccumulatedActiveTime() {
if (splitStartTime == SPLIT_NOT_STARTED) {
return 0L;
}
return Math.max(
clock.absoluteTimeMillis()
- splitStartTime
- getAccumulatedPausedTime()
- getAccumulatedIdleTime(),
0);
}
public long getAccumulatedIdleTime() {
return idleTimePerSecond.getAccumulatedCount();
}
public long getIdleTimePerSecond() {
return idleTimePerSecond.getValue();
}
public long getPausedTimePerSecond() {
return pausedTimePerSecond.getValue();
}
public long getAccumulatedPausedTime() {
return pausedTimePerSecond.getAccumulatedCount();
}
public Boolean isPaused() {
return pausedTimePerSecond.isMeasuring();
}
public Boolean isIdle() {
return idleTimePerSecond.isMeasuring();
}
public Boolean isActive() {
return !isPaused() && !isIdle();
}
public void onSplitFinished() {
if (splitWatermarkMetricGroup instanceof AbstractMetricGroup) {
((AbstractMetricGroup) splitWatermarkMetricGroup).close();
} else {
if (splitWatermarkMetricGroup != null) {
LOG.warn(
"Split watermark metric group can not be closed, expecting an instance of AbstractMetricGroup but got: ",
splitWatermarkMetricGroup.getClass().getName());
}
}
}
@VisibleForTesting
public MetricGroup getSplitWatermarkMetricGroup() {
return splitWatermarkMetricGroup;
}
}
| InternalSourceSplitMetricGroup |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java | {
"start": 14549,
"end": 14756
} | interface ____ java passing null.
*/
@Test
public void testNullArgs() throws IOException {
GenericOptionsParser parser = new GenericOptionsParser(conf, null);
parser.getRemainingArgs();
}
}
| from |
java | elastic__elasticsearch | modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryVersionTests.java | {
"start": 746,
"end": 1862
} | class ____ extends AbstractAsyncBulkByScrollActionMetadataTestCase<UpdateByQueryRequest, BulkByScrollResponse> {
UpdateByQueryRequest request;
public void testVersion() {
request = null;
assertFalse(action().mainRequest.getSearchRequest().source().version());
request = new UpdateByQueryRequest();
request.getSearchRequest().source().version(false);
assertFalse(action().mainRequest.getSearchRequest().source().version());
request = new UpdateByQueryRequest();
request.getSearchRequest().source().version(null);
assertFalse(action().mainRequest.getSearchRequest().source().version());
request = new UpdateByQueryRequest();
request.getSearchRequest().source().version(true);
assertTrue(action().mainRequest.getSearchRequest().source().version());
}
@Override
protected TestAction action() {
return new TestAction();
}
@Override
protected UpdateByQueryRequest request() {
return request != null ? request : new UpdateByQueryRequest();
}
private | UpdateByQueryVersionTests |
java | quarkusio__quarkus | extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonFailOnEmptyBeansNotSetTest.java | {
"start": 770,
"end": 919
} | class ____ {
private final String property;
public Pojo(String property) {
this.property = property;
}
}
}
| Pojo |
java | elastic__elasticsearch | x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java | {
"start": 1439,
"end": 16317
} | class ____ extends ESTestCase {
public void testCaching() {
// Emulated search requests that an enrich processor could generate:
// (two unique searches for two enrich policies)
var projectId = randomProjectIdOrDefault();
var cacheKey1 = new EnrichCache.CacheKey(projectId, "policy1-1", "1", 1);
var cacheKey2 = new EnrichCache.CacheKey(projectId, "policy1-1", "2", 1);
var cacheKey3 = new EnrichCache.CacheKey(projectId, "policy2-1", "1", 1);
var cacheKey4 = new EnrichCache.CacheKey(projectId, "policy2-1", "2", 1);
// Emulated search response (content doesn't matter, since it isn't used, it just a cache entry)
EnrichCache.CacheValue searchResponse = new EnrichCache.CacheValue(List.of(Map.of("test", "entry")), 1L);
EnrichCache enrichCache = new EnrichCache(3);
enrichCache.put(cacheKey1, searchResponse);
enrichCache.put(cacheKey2, searchResponse);
enrichCache.put(cacheKey3, searchResponse);
var cacheStats = enrichCache.getStats("_id");
assertThat(cacheStats.count(), equalTo(3L));
assertThat(cacheStats.hits(), equalTo(0L));
assertThat(cacheStats.misses(), equalTo(0L));
assertThat(cacheStats.evictions(), equalTo(0L));
assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L));
assertThat(enrichCache.get(cacheKey1), notNullValue());
assertThat(enrichCache.get(cacheKey2), notNullValue());
assertThat(enrichCache.get(cacheKey3), notNullValue());
assertThat(enrichCache.get(cacheKey4), nullValue());
cacheStats = enrichCache.getStats("_id");
assertThat(cacheStats.count(), equalTo(3L));
assertThat(cacheStats.hits(), equalTo(3L));
assertThat(cacheStats.misses(), equalTo(1L));
assertThat(cacheStats.evictions(), equalTo(0L));
assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L));
enrichCache.put(cacheKey4, searchResponse);
cacheStats = enrichCache.getStats("_id");
assertThat(cacheStats.count(), equalTo(3L));
assertThat(cacheStats.hits(), equalTo(3L));
assertThat(cacheStats.misses(), equalTo(1L));
assertThat(cacheStats.evictions(), equalTo(1L));
assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L));
cacheKey1 = new EnrichCache.CacheKey(projectId, "policy1-2", "1", 1);
cacheKey2 = new EnrichCache.CacheKey(projectId, "policy1-2", "2", 1);
cacheKey3 = new EnrichCache.CacheKey(projectId, "policy2-2", "1", 1);
cacheKey4 = new EnrichCache.CacheKey(projectId, "policy2-2", "2", 1);
// Because enrich index has changed, cache can't serve cached entries
assertThat(enrichCache.get(cacheKey1), nullValue());
assertThat(enrichCache.get(cacheKey2), nullValue());
assertThat(enrichCache.get(cacheKey3), nullValue());
assertThat(enrichCache.get(cacheKey4), nullValue());
// Add new entries using new enrich index name as key
enrichCache.put(cacheKey1, searchResponse);
enrichCache.put(cacheKey2, searchResponse);
enrichCache.put(cacheKey3, searchResponse);
// Entries can now be served:
assertThat(enrichCache.get(cacheKey1), notNullValue());
assertThat(enrichCache.get(cacheKey2), notNullValue());
assertThat(enrichCache.get(cacheKey3), notNullValue());
assertThat(enrichCache.get(cacheKey4), nullValue());
cacheStats = enrichCache.getStats("_id");
assertThat(cacheStats.count(), equalTo(3L));
assertThat(cacheStats.hits(), equalTo(6L));
assertThat(cacheStats.misses(), equalTo(6L));
assertThat(cacheStats.evictions(), equalTo(4L));
assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L));
}
public void testComputeIfAbsent() throws InterruptedException {
// Emulated search requests that an enrich processor could generate:
// (two unique searches for two enrich policies)
final List<Map<String, ?>> searchResponseMap = List.of(
Map.of("key1", "value1", "key2", "value2"),
Map.of("key3", "value3", "key4", "value4")
);
final AtomicLong testNanoTime = new AtomicLong(0);
// We use a relative time provider that increments 1ms every time it is called. So each operation appears to take 1ms
EnrichCache enrichCache = new EnrichCache(3, () -> testNanoTime.addAndGet(TimeValue.timeValueMillis(1).getNanos()));
ProjectId projectId = randomProjectIdOrDefault();
long expectedMisses = 0L;
{
// Do initial computeIfAbsent, assert that it is a cache miss and the search is performed:
CountDownLatch queriedDatabaseLatch = new CountDownLatch(1);
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent(projectId, "policy1-1", "1", 1, (searchResponseActionListener) -> {
SearchResponse searchResponse = convertToSearchResponse(searchResponseMap);
searchResponseActionListener.onResponse(searchResponse);
searchResponse.decRef();
queriedDatabaseLatch.countDown();
}, assertNoFailureListener(response -> {
assertThat(response, equalTo(searchResponseMap));
notifiedOfResultLatch.countDown();
}));
assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.count(), equalTo(1L));
assertThat(cacheStats.hits(), equalTo(0L));
assertThat(cacheStats.misses(), equalTo(++expectedMisses));
assertThat(cacheStats.evictions(), equalTo(0L));
assertThat(cacheStats.hitsTimeInMillis(), equalTo(0L));
assertThat(cacheStats.missesTimeInMillis(), equalTo(2L)); // cache query and enrich query + cache put
}
{
// Do the same call, assert that it is a cache hit and no search is performed:
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent(projectId, "policy1-1", "1", 1, (searchResponseActionListener) -> {
fail("Expected no call to the database because item should have been in the cache");
}, assertNoFailureListener(r -> notifiedOfResultLatch.countDown()));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.count(), equalTo(1L));
assertThat(cacheStats.hits(), equalTo(1L));
assertThat(cacheStats.misses(), equalTo(expectedMisses));
assertThat(cacheStats.evictions(), equalTo(0L));
assertThat(cacheStats.hitsTimeInMillis(), equalTo(1L));
assertThat(cacheStats.missesTimeInMillis(), equalTo(2L));
}
{
// Do a computeIfAbsent with a different index, assert that it is a cache miss and the search is performed:
CountDownLatch queriedDatabaseLatch = new CountDownLatch(1);
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent(projectId, "policy1-2", "1", 1, (searchResponseActionListener) -> {
SearchResponse searchResponse = convertToSearchResponse(searchResponseMap);
searchResponseActionListener.onResponse(searchResponse);
searchResponse.decRef();
queriedDatabaseLatch.countDown();
}, assertNoFailureListener(response -> {
assertThat(response, equalTo(searchResponseMap));
notifiedOfResultLatch.countDown();
}));
assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.misses(), equalTo(++expectedMisses));
}
{
// Do a computeIfAbsent with a different project, assert that it is a cache miss and the search is performed:
CountDownLatch queriedDatabaseLatch = new CountDownLatch(1);
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent(randomUniqueProjectId(), "policy1-1", "1", 1, (searchResponseActionListener) -> {
SearchResponse searchResponse = convertToSearchResponse(searchResponseMap);
searchResponseActionListener.onResponse(searchResponse);
searchResponse.decRef();
queriedDatabaseLatch.countDown();
}, assertNoFailureListener(response -> {
assertThat(response, equalTo(searchResponseMap));
notifiedOfResultLatch.countDown();
}));
assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.misses(), equalTo(++expectedMisses));
}
{
// Do a computeIfAbsent with a different lookup value, assert that it is a cache miss and the search is performed:
CountDownLatch queriedDatabaseLatch = new CountDownLatch(1);
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent(projectId, "policy1-1", "2", 1, (searchResponseActionListener) -> {
SearchResponse searchResponse = convertToSearchResponse(searchResponseMap);
searchResponseActionListener.onResponse(searchResponse);
searchResponse.decRef();
queriedDatabaseLatch.countDown();
}, assertNoFailureListener(response -> {
assertThat(response, equalTo(searchResponseMap));
notifiedOfResultLatch.countDown();
}));
assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.misses(), equalTo(++expectedMisses));
}
{
// Do a computeIfAbsent with a different max matches, assert that it is a cache miss and the search is performed:
CountDownLatch queriedDatabaseLatch = new CountDownLatch(1);
CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
enrichCache.computeIfAbsent(projectId, "policy1-1", "1", 3, (searchResponseActionListener) -> {
SearchResponse searchResponse = convertToSearchResponse(searchResponseMap);
searchResponseActionListener.onResponse(searchResponse);
searchResponse.decRef();
queriedDatabaseLatch.countDown();
}, assertNoFailureListener(response -> {
assertThat(response, equalTo(searchResponseMap));
notifiedOfResultLatch.countDown();
}));
assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10));
assertThat(cacheStats.misses(), equalTo(++expectedMisses));
}
}
private SearchResponse convertToSearchResponse(List<Map<String, ?>> searchResponseList) {
SearchHit[] hitArray = searchResponseList.stream().map(map -> {
try {
return SearchHit.unpooled(0, "id").sourceRef(convertMapToJson(map));
} catch (IOException e) {
throw new RuntimeException(e);
}
}).toArray(SearchHit[]::new);
SearchHits hits = SearchHits.unpooled(hitArray, null, 0);
return SearchResponseUtils.response(hits).shards(5, 4, 0).build();
}
private BytesReference convertMapToJson(Map<String, ?> simpleMap) throws IOException {
try (XContentBuilder builder = JsonXContent.contentBuilder().map(simpleMap)) {
return BytesReference.bytes(builder);
}
}
public void testDeepCopy() {
Map<String, Object> original = new HashMap<>();
{
original.put("foo", "bar");
original.put("int", 123);
original.put("double", 123.0D);
Map<String, Object> innerObject = new HashMap<>();
innerObject.put("buzz", "hello world");
innerObject.put("foo_null", null);
innerObject.put("1", "bar");
innerObject.put("long", 123L);
List<String> innerInnerList = new ArrayList<>();
innerInnerList.add("item1");
List<Object> innerList = new ArrayList<>();
innerList.add(innerInnerList);
innerObject.put("list", innerList);
original.put("fizz", innerObject);
List<Map<String, Object>> list = new ArrayList<>();
Map<String, Object> value = new HashMap<>();
value.put("field", "value");
list.add(value);
list.add(null);
original.put("list", list);
List<String> list2 = new ArrayList<>();
list2.add("foo");
list2.add("bar");
list2.add("baz");
original.put("list2", list2);
}
Map<?, ?> result = EnrichCache.deepCopy(original, false);
assertThat(result, equalTo(original));
assertThat(result, not(sameInstance(original)));
result = EnrichCache.deepCopy(original, true);
assertThat(result, equalTo(original));
assertThat(result, not(sameInstance(original)));
Map<?, ?> innerMap = (Map<?, ?>) result.get("fizz");
expectThrows(UnsupportedOperationException.class, () -> innerMap.remove("x"));
List<?> innerList = (List<?>) result.get("list");
expectThrows(UnsupportedOperationException.class, () -> innerList.remove(0));
original.put("embedded_object", new byte[] { 1, 2, 3 });
result = EnrichCache.deepCopy(original, false);
assertArrayEquals(new byte[] { 1, 2, 3 }, (byte[]) result.get("embedded_object"));
}
}
| EnrichCacheTests |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/CamelSpringRouteProcessorDumpRouteTest.java | {
"start": 1195,
"end": 1686
} | class ____ extends CamelSpringPlainTest {
@BeforeAll
public static void prepareFiles() throws Exception {
TestSupport.deleteDirectory("target/camel-route-dump");
}
@Test
@Override
public void testRouteCoverage() {
camelContext.stop();
// there should be files
String[] names = new File("target/camel-route-dump").list();
assertNotNull(names);
assertTrue(names.length > 0);
}
}
| CamelSpringRouteProcessorDumpRouteTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RLiveObjectService.java | {
"start": 9116,
"end": 9268
} | class ____ the service, registering all the classes on
* startup can speed up the instance creation. This is <b>NOT</b> mandatory
* since the | with |
java | quarkusio__quarkus | independent-projects/tools/analytics-common/src/test/java/io/quarkus/analytics/AnalyticsServicePromptTest.java | {
"start": 767,
"end": 3863
} | class ____ extends AnalyticsServiceTestBase {
private FileLocations fileLocations;
private AnalyticsService service;
@BeforeEach
void setUp() throws IOException {
fileLocations = new TestFileLocationsImpl(true);
service = new AnalyticsService(fileLocations, MessageWriter.info());
}
@AfterEach
void tearDown() throws IOException {
((TestFileLocationsImpl) fileLocations).deleteAll();
service = null;
}
@Test
void testConsoleQuestion_yes() throws IOException {
assertFalse(fileLocations.getLocalConfigFile().toFile().exists());
service.buildAnalyticsUserInput((String prompt) -> {
assertEquals(ACCEPTANCE_PROMPT, prompt);
return "y";
});
assertTrue(fileLocations.getLocalConfigFile().toFile().exists());
Optional<LocalConfig> localConfig = FileUtils.read(LocalConfig.class, fileLocations.getLocalConfigFile(),
MessageWriter.info());
assertTrue(localConfig.isPresent());
assertFalse(localConfig.get().isDisabled());
}
@Test
void testConsoleQuestion_no() throws IOException {
assertFalse(fileLocations.getLocalConfigFile().toFile().exists());
service.buildAnalyticsUserInput((String prompt) -> {
assertEquals(ACCEPTANCE_PROMPT, prompt);
return "n";
});
assertTrue(fileLocations.getLocalConfigFile().toFile().exists());
Optional<LocalConfig> localConfig = FileUtils.read(LocalConfig.class, fileLocations.getLocalConfigFile(),
MessageWriter.info());
assertTrue(localConfig.isPresent());
assertTrue(localConfig.get().isDisabled());
}
@Test
void testConsoleQuestion_promptTimeout() throws IOException {
try {
System.setProperty("quarkus.analytics.prompt.timeout", "0");
assertFalse(fileLocations.getLocalConfigFile().toFile().exists());
service.buildAnalyticsUserInput((String prompt) -> {
assertEquals(ACCEPTANCE_PROMPT, prompt);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return "n";
});
assertFalse(fileLocations.getLocalConfigFile().toFile().exists());
} finally {
System.clearProperty("quarkus.analytics.prompt.timeout");
}
}
@Test
void testConsoleQuestion_AnalyticsDisabled() throws IOException {
try {
System.setProperty("quarkus.analytics.disabled", "true");
assertFalse(fileLocations.getLocalConfigFile().toFile().exists());
service.buildAnalyticsUserInput((String prompt) -> {
fail("Prompt should be disabled");
return "n";
});
assertFalse(fileLocations.getLocalConfigFile().toFile().exists());
} finally {
System.clearProperty("quarkus.analytics.disabled");
}
}
}
| AnalyticsServicePromptTest |
java | google__guava | android/guava/src/com/google/common/collect/SortedIterables.java | {
"start": 972,
"end": 2043
} | class ____ {
private SortedIterables() {}
/**
* Returns {@code true} if {@code elements} is a sorted collection using an ordering equivalent to
* {@code comparator}.
*/
public static boolean hasSameComparator(Comparator<?> comparator, Iterable<?> elements) {
checkNotNull(comparator);
checkNotNull(elements);
Comparator<?> comparator2;
if (elements instanceof SortedSet) {
comparator2 = comparator((SortedSet<?>) elements);
} else if (elements instanceof SortedIterable) {
comparator2 = ((SortedIterable<?>) elements).comparator();
} else {
return false;
}
return comparator.equals(comparator2);
}
@SuppressWarnings("unchecked")
// if sortedSet.comparator() is null, the set must be naturally ordered
public static <E extends @Nullable Object> Comparator<? super E> comparator(
SortedSet<E> sortedSet) {
Comparator<? super E> result = sortedSet.comparator();
if (result == null) {
result = (Comparator<? super E>) Ordering.natural();
}
return result;
}
}
| SortedIterables |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlExtPartition.java | {
"start": 336,
"end": 1078
} | class ____ extends MySqlObjectImpl implements Cloneable {
private final List<Item> items = new ArrayList<Item>();
public List<Item> getItems() {
return items;
}
@Override
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
for (int i = 0; i < items.size(); i++) {
items.get(i).accept(visitor);
}
}
visitor.endVisit(this);
}
public MySqlExtPartition clone() {
MySqlExtPartition x = new MySqlExtPartition();
for (Item item : items) {
Item item1 = item.clone();
item1.setParent(x);
x.items.add(item1);
}
return x;
}
public static | MySqlExtPartition |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/function/json/JsonMergepatchTest.java | {
"start": 976,
"end": 1273
} | class ____ {
@Test
public void testSimple(SessionFactoryScope scope) {
scope.inSession( em -> {
//tag::hql-json-mergepatch-example[]
em.createQuery( "select json_mergepatch('{\"a\":1}', '{\"b\":2}')" ).getResultList();
//end::hql-json-mergepatch-example[]
} );
}
}
| JsonMergepatchTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/BeanInvocationThrowsExceptionTest.java | {
"start": 2442,
"end": 2738
} | class ____ {
public int doSomething(String request) {
if (request.equals("Hello London")) {
return 1;
} else {
throw new IllegalArgumentException("Forced exception");
}
}
}
// END SNIPPET: e1
}
| ExceptionBean |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/component/AbstractApiEndpoint.java | {
"start": 11525,
"end": 13374
} | class ____
final String endpointClassName = endpointClass.getName();
ExecutorService executorService = EXECUTOR_SERVICE_MAP.get(endpointClassName);
// CamelContext will shutdown thread pool when it shutdown so we can
// lazy create it on demand
// but in case of hot-deploy or the likes we need to be able to
// re-create it (its a shared static instance)
if (executorService == null || executorService.isTerminated() || executorService.isShutdown()) {
final ExecutorServiceManager manager = context.getExecutorServiceManager();
// try to lookup a pool first based on profile
ThreadPoolProfile poolProfile = manager.getThreadPoolProfile(
threadProfileName);
if (poolProfile == null) {
poolProfile = manager.getDefaultThreadPoolProfile();
}
// create a new pool using the custom or default profile
executorService = manager.newScheduledThreadPool(endpointClass, threadProfileName, poolProfile);
EXECUTOR_SERVICE_MAP.put(endpointClassName, executorService);
}
return executorService;
}
public final ExecutorService getExecutorService() {
if (executorService == null) {
lock.lock();
try {
if (executorService == null) {
executorService = getExecutorService(getClass(), getCamelContext(), getThreadProfileName());
}
} finally {
lock.unlock();
}
}
return executorService;
}
/**
* Returns Thread profile name. Generated as a constant THREAD_PROFILE_NAME in *Constants.
*
* @return thread profile name to use.
*/
protected abstract String getThreadProfileName();
}
| name |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/EventLoopFlow.java | {
"start": 1411,
"end": 2093
} | class ____ <b>not</b> thread-safe: The invariants for calls to {@link #executeNow} are very
* strict. In particular:
* <ul>
* <li>There must be no concurrent calls to {@link #executeNow}.</li>
* <li>When {@link #executeNow} returns {@code true}, the subsequent execution of the child
* method ({@code onNext0} in the above example) must fully complete before the next
* {@link #executeNow} call. This ensures that there are no concurrent calls to the child
* method.</li>
* </ul>
* Both of these invariants are guaranteed by the reactive spec, but may not apply to other use
* cases.
*
* @since 4.4.0
* @author Jonas Konrad
*/
@Internal
public final | is |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ResourceElementResolver.java | {
"start": 10425,
"end": 11142
} | class ____ extends DependencyDescriptor {
private final Class<?> lookupType;
private final boolean lazyLookup;
public LookupDependencyDescriptor(Field field, Class<?> lookupType, boolean lazyLookup) {
super(field, true);
this.lookupType = lookupType;
this.lazyLookup = lazyLookup;
}
public LookupDependencyDescriptor(Method method, Class<?> lookupType, boolean lazyLookup) {
super(new MethodParameter(method, 0), true);
this.lookupType = lookupType;
this.lazyLookup = lazyLookup;
}
@Override
public Class<?> getDependencyType() {
return this.lookupType;
}
@Override
public boolean supportsLazyResolution() {
return !this.lazyLookup;
}
}
}
| LookupDependencyDescriptor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SaveNamespaceContext.java | {
"start": 1403,
"end": 2593
} | class ____ {
private final FSNamesystem sourceNamesystem;
private final long txid;
private final List<StorageDirectory> errorSDs =
Collections.synchronizedList(new ArrayList<StorageDirectory>());
private final Canceler canceller;
private final CountDownLatch completionLatch = new CountDownLatch(1);
SaveNamespaceContext(
FSNamesystem sourceNamesystem,
long txid,
Canceler canceller) {
this.sourceNamesystem = sourceNamesystem;
this.txid = txid;
this.canceller = canceller;
}
FSNamesystem getSourceNamesystem() {
return sourceNamesystem;
}
long getTxId() {
return txid;
}
void reportErrorOnStorageDirectory(StorageDirectory sd) {
errorSDs.add(sd);
}
List<StorageDirectory> getErrorSDs() {
return errorSDs;
}
void markComplete() {
Preconditions.checkState(completionLatch.getCount() == 1,
"Context already completed!");
completionLatch.countDown();
}
public void checkCancelled() throws SaveNamespaceCancelledException {
if (canceller.isCancelled()) {
throw new SaveNamespaceCancelledException(
canceller.getCancellationReason());
}
}
}
| SaveNamespaceContext |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java | {
"start": 1691,
"end": 7147
} | class ____ {
@Test
void fixed() throws java.io.IOException {
new ResolvingGrammarGenerator().generate(Schema.createFixed("MyFixed", null, null, 10),
Schema.create(Schema.Type.BYTES));
new ResolvingGrammarGenerator().generate(Schema.create(Schema.Type.BYTES),
Schema.createFixed("MyFixed", null, null, 10));
}
Schema point2dFullname = SchemaBuilder.record("Point").namespace("written").fields().requiredDouble("x")
.requiredDouble("y").endRecord();
Schema point3dNoDefault = SchemaBuilder.record("Point").fields().requiredDouble("x").requiredDouble("y")
.requiredDouble("z").endRecord();
Schema point2d = SchemaBuilder.record("Point2D").fields().requiredDouble("x").requiredDouble("y").endRecord();
Schema point3d = SchemaBuilder.record("Point3D").fields().requiredDouble("x").requiredDouble("y").name("z").type()
.doubleType().doubleDefault(0.0).endRecord();
Schema point3dMatchName = SchemaBuilder.record("Point").fields().requiredDouble("x").requiredDouble("y").name("z")
.type().doubleType().doubleDefault(0.0).endRecord();
@Test
void unionResolutionNoStructureMatch() throws Exception {
assertThrows(SchemaValidationException.class, () -> {
// there is a short name match, but the structure does not match
Schema read = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), point3dNoDefault));
new SchemaValidatorBuilder().canBeReadStrategy().validateAll().validate(point2dFullname,
Collections.singletonList(read));
});
}
@Test
void unionResolutionFirstStructureMatch2d() throws Exception {
// multiple structure matches with no short or full name matches
Schema read = Schema
.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), point3dNoDefault, point2d, point3d));
Symbol grammar = new ResolvingGrammarGenerator().generate(point2dFullname, read);
assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction) grammar.production[1];
assertEquals(2, action.rindex);
}
@Test
void unionResolutionFirstStructureMatch3d() throws Exception {
// multiple structure matches with no short or full name matches
Schema read = Schema
.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), point3dNoDefault, point3d, point2d));
Symbol grammar = new ResolvingGrammarGenerator().generate(point2dFullname, read);
assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction) grammar.production[1];
assertEquals(2, action.rindex);
}
@Test
void unionResolutionNamedStructureMatch() throws Exception {
// multiple structure matches with a short name match
Schema read = Schema
.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), point2d, point3dMatchName, point3d));
Symbol grammar = new ResolvingGrammarGenerator().generate(point2dFullname, read);
assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction) grammar.production[1];
assertEquals(2, action.rindex);
}
@Test
void unionResolutionFullNameMatch() throws Exception {
// there is a full name match, so it should be chosen
Schema read = Schema.createUnion(
Arrays.asList(Schema.create(Schema.Type.NULL), point2d, point3dMatchName, point3d, point2dFullname));
Symbol grammar = new ResolvingGrammarGenerator().generate(point2dFullname, read);
assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction) grammar.production[1];
assertEquals(4, action.rindex);
}
@Test
void avro2702StringProperties() throws IOException {
// Create a nested record schema with string fields at two levels.
Schema inner = SchemaBuilder.builder().record("B").fields().requiredString("b1").endRecord();
Schema outer = SchemaBuilder.builder().record("A").fields().requiredString("a1").name("inner").type().unionOf()
.nullType().and().type(inner).endUnion().noDefault().endRecord();
// Make a copy with the two string fields annotated.
Schema outer2 = new Schema.Parser().parse(outer.toString());
outer2.getField("a1").schema().addProp(GenericData.STRING_PROP, "String");
Schema inner2 = outer2.getField("inner").schema().getTypes().get(1);
inner2.getField("b1").schema().addProp(GenericData.STRING_PROP, "String");
// The two schemas are not the same, but they serialize to the same.
assertThat(outer, not(outer2));
// This is a serialized record.
byte[] serialized = { 2, 'a', // a1 is a one character string
2, // Pick the non-null UNION branch and
2, 'b' // Another one character string
};
GenericRecord out = null;
try (ByteArrayInputStream bais = new ByteArrayInputStream(serialized)) {
Decoder decoder = DecoderFactory.get().binaryDecoder(bais, null);
DatumReader<GenericRecord> r = new GenericDatumReader<>(outer, outer2, GenericData.get());
out = r.read(null, decoder);
}
// Assert that the two fields were read and are of type String.
assertThat(out.get("a1"), instanceOf(String.class));
assertThat(((GenericRecord) out.get("inner")).get("b1"), instanceOf(String.class));
}
}
| TestResolvingGrammarGenerator2 |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/query/CursorQuery.java | {
"start": 1154,
"end": 1224
} | class ____ query will return results for.
* @author Markus
*/
public | the |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/float_/FloatAssert_isFinite_Test.java | {
"start": 798,
"end": 910
} | class ____ testing {@link FloatAssert#isFinite()} method.
*
* @author Jin Kwon <onacit_at_gmail.com>
*/
| for |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/AutoConfigurationSorterTests.java | {
"start": 11471,
"end": 11557
} | class ____ {
}
@AutoConfigureOrder(Ordered.LOWEST_PRECEDENCE)
static | OrderUnspecified |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTest.java | {
"start": 1296,
"end": 4968
} | class ____ extends ContextTestSupport {
@Test
public void testNoAdvised() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
// START SNIPPET: e1
@Test
public void testAdvised() throws Exception {
// advice the first route using the inlined route builder
AdviceWith.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
// intercept sending to mock:foo and do something else
interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
}
});
getMockEndpoint("mock:foo").expectedMessageCount(0);
getMockEndpoint("mock:advised").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
// END SNIPPET: e1
@Test
public void testAdvisedNoNewRoutesAllowed() throws Exception {
try {
AdviceWith.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:bar").to("mock:bar");
interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
}
});
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
// expected
}
}
@Test
public void testAdvisedThrowException() throws Exception {
AdviceWith.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
interceptSendToEndpoint("mock:foo").to("mock:advised").throwException(new IllegalArgumentException("Damn"));
}
});
getMockEndpoint("mock:foo").expectedMessageCount(0);
getMockEndpoint("mock:advised").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(0);
try {
template.sendBody("direct:start", "Hello World");
fail("Should have thrown exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertEquals("Damn", e.getCause().getMessage());
}
assertMockEndpointsSatisfied();
}
@Test
public void testAdvisedEmptyRoutes() throws Exception {
try {
AdviceWith.adviceWith(new RouteDefinition(), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
}
});
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
// expected
}
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").to("mock:foo").to("mock:result");
}
};
}
}
| AdviceWithTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/mapping/internal/UuidGeneratorStyleMarshalling.java | {
"start": 261,
"end": 537
} | class ____ {
public static UuidGenerator.Style fromXml(String name) {
return name == null ? null : UuidGenerator.Style.valueOf( name );
}
public static String toXml(UuidGenerator.Style style) {
return style == null ? null : style.name();
}
}
| UuidGeneratorStyleMarshalling |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/quote/resultsetmappings/ExplicitSqlResultSetMappingTest.java | {
"start": 853,
"end": 1876
} | class ____ {
private String queryString = null;
@BeforeEach
public void prepareTestData(SessionFactoryScope scope) {
Dialect dialect = scope.getSessionFactory().getJdbcServices().getDialect();
char open = dialect.openQuote();
char close = dialect.closeQuote();
queryString = "select t." + open + "NAME" + close + " as " + open + "QuotEd_nAMe" + close + " from " + open + "MY_ENTITY_TABLE" + close + " t";
scope.inTransaction(
s -> s.persist( new MyEntity( "mine" ) )
);
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncateMappedObjects();
}
@Test
public void testCompleteScalarAutoDiscovery(SessionFactoryScope scope) {
scope.inTransaction(
s -> s.createNativeQuery( queryString ).list()
);
}
@Test
public void testPartialScalarAutoDiscovery(SessionFactoryScope scope) {
scope.inTransaction(
s -> s.createNativeQuery( queryString, "explicitScalarResultSetMapping" ).list()
);
}
}
| ExplicitSqlResultSetMappingTest |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/token/JwtEncodingContext.java | {
"start": 1433,
"end": 2938
} | class ____ implements OAuth2TokenContext {
private final Map<Object, Object> context;
private JwtEncodingContext(Map<Object, Object> context) {
this.context = Collections.unmodifiableMap(new HashMap<>(context));
}
@SuppressWarnings("unchecked")
@Nullable
@Override
public <V> V get(Object key) {
return hasKey(key) ? (V) this.context.get(key) : null;
}
@Override
public boolean hasKey(Object key) {
Assert.notNull(key, "key cannot be null");
return this.context.containsKey(key);
}
/**
* Returns the {@link JwsHeader.Builder JWS headers} allowing the ability to add,
* replace, or remove.
* @return the {@link JwsHeader.Builder}
*/
public JwsHeader.Builder getJwsHeader() {
return get(JwsHeader.Builder.class);
}
/**
* Returns the {@link JwtClaimsSet.Builder claims} allowing the ability to add,
* replace, or remove.
* @return the {@link JwtClaimsSet.Builder}
*/
public JwtClaimsSet.Builder getClaims() {
return get(JwtClaimsSet.Builder.class);
}
/**
* Constructs a new {@link Builder} with the provided JWS headers and claims.
* @param jwsHeaderBuilder the JWS headers to initialize the builder
* @param claimsBuilder the claims to initialize the builder
* @return the {@link Builder}
*/
public static Builder with(JwsHeader.Builder jwsHeaderBuilder, JwtClaimsSet.Builder claimsBuilder) {
return new Builder(jwsHeaderBuilder, claimsBuilder);
}
/**
* A builder for {@link JwtEncodingContext}.
*/
public static final | JwtEncodingContext |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindowedDeserializer.java | {
"start": 1565,
"end": 6128
} | class ____ a windowed record. Must implement the {@link Serde} interface.
*/
public static final String WINDOWED_INNER_DESERIALIZER_CLASS = "windowed.inner.deserializer.class";
private final Logger log = LoggerFactory.getLogger(TimeWindowedDeserializer.class);
private Long windowSize;
private boolean isChangelogTopic;
private Deserializer<T> inner;
// Default constructor needed by Kafka
public TimeWindowedDeserializer() {
this(null, null);
}
public TimeWindowedDeserializer(final Deserializer<T> inner, final Long windowSize) {
this.inner = inner;
this.windowSize = windowSize;
this.isChangelogTopic = false;
}
public Long getWindowSize() {
return this.windowSize;
}
@Override
public void configure(final Map<String, ?> configs, final boolean isKey) {
configureWindowSizeMs(configs);
configureWindowInnerDeserializerClass(configs);
}
@Override
public Windowed<T> deserialize(final String topic, final byte[] data) {
WindowedSerdes.verifyInnerDeserializerNotNull(inner, this);
if (data == null || data.length == 0) {
return null;
}
// toStoreKeyBinary was used to serialize the data.
if (this.isChangelogTopic) {
return WindowKeySchema.fromStoreKey(data, windowSize, inner, topic);
}
// toBinary was used to serialize the data
return WindowKeySchema.from(data, windowSize, inner, topic);
}
@Override
public void close() {
if (inner != null) {
inner.close();
}
}
public void setIsChangelogTopic(final boolean isChangelogTopic) {
this.isChangelogTopic = isChangelogTopic;
}
// Only for testing
Deserializer<T> innerDeserializer() {
return inner;
}
private void configureWindowSizeMs(final Map<String, ?> configs) {
//check to see if the window size config is set and the window size is already set from the constructor
final Long configWindowSize;
if (configs.get(WINDOW_SIZE_MS_CONFIG) instanceof String) {
configWindowSize = Long.parseLong((String) configs.get(WINDOW_SIZE_MS_CONFIG));
} else {
configWindowSize = (Long) configs.get(WINDOW_SIZE_MS_CONFIG);
}
if (windowSize != null && configWindowSize != null) {
throw new IllegalArgumentException("Window size should not be set in both the time windowed deserializer constructor and the window.size.ms config");
} else if (windowSize == null && configWindowSize == null) {
throw new IllegalArgumentException("Window size needs to be set either through the time windowed deserializer " +
"constructor or the window.size.ms config but not both");
} else {
windowSize = windowSize == null ? configWindowSize : windowSize;
}
}
@SuppressWarnings({"deprecation", "unchecked"})
private void configureWindowInnerDeserializerClass(final Map<String, ?> configs) {
String deserializerConfigKey = WINDOWED_INNER_DESERIALIZER_CLASS;
String deserializerConfigValue = (String) configs.get(WINDOWED_INNER_DESERIALIZER_CLASS);
if (deserializerConfigValue == null) {
final String windowedInnerClassSerdeConfig = (String) configs.get(StreamsConfig.WINDOWED_INNER_CLASS_SERDE);
if (windowedInnerClassSerdeConfig != null) {
deserializerConfigKey = StreamsConfig.WINDOWED_INNER_CLASS_SERDE;
deserializerConfigValue = windowedInnerClassSerdeConfig;
log.warn("Config {} is deprecated. Please use {} instead.",
StreamsConfig.WINDOWED_INNER_CLASS_SERDE, WINDOWED_INNER_DESERIALIZER_CLASS);
}
}
Serde<T> windowedInnerDeserializerClass = null;
if (deserializerConfigValue != null) {
try {
windowedInnerDeserializerClass = Utils.newInstance(deserializerConfigValue, Serde.class);
} catch (final ClassNotFoundException e) {
throw new ConfigException(deserializerConfigKey, deserializerConfigValue,
"Serde class " + deserializerConfigValue + " could not be found.");
}
}
if (inner != null && deserializerConfigValue != null) {
if (!inner.getClass().getName().equals(windowedInnerDeserializerClass.deserializer().getClass().getName())) {
throw new IllegalArgumentException("Inner | of |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/temporal/OffsetDateTimeTest.java | {
"start": 2407,
"end": 10214
} | class ____ extends AbstractJavaTimeTypeTests<OffsetDateTime, OffsetDateTimeTest.EntityWithOffsetDateTime> {
public static List<Parameter<OffsetDateTime,DataImpl>> testData() {
return new ParametersBuilder( DialectContext.getDialect() )
// Not affected by any known bug
.add( 2017, 11, 6, 19, 19, 1, 0, "+10:00", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "+07:00", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "+01:30", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "+01:00", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "+00:30", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "-02:00", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "-06:00", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "-08:00", ZONE_UTC_MINUS_8 )
.add( 2017, 11, 6, 19, 19, 1, 0, "+10:00", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "+07:00", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "+01:30", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 500, "+01:00", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "+01:00", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "+00:30", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "-02:00", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "-06:00", ZONE_PARIS )
.add( 2017, 11, 6, 19, 19, 1, 0, "-08:00", ZONE_PARIS )
.skippedForDialects(
// MySQL/Mariadb cannot store values equal to epoch exactly, or less, in a timestamp.
Arrays.asList( MySQLDialect.class, MariaDBDialect.class ),
b -> b
// Not affected by any known bug
.add( 1970, 1, 1, 0, 0, 0, 0, "+01:00", ZONE_GMT )
.add( 1970, 1, 1, 0, 0, 0, 0, "+00:00", ZONE_GMT )
.add( 1970, 1, 1, 0, 0, 0, 0, "-01:00", ZONE_GMT )
.add( 1900, 1, 1, 0, 0, 0, 0, "+01:00", ZONE_GMT )
.add( 1900, 1, 1, 0, 0, 0, 0, "+00:00", ZONE_GMT )
.add( 1900, 1, 1, 0, 0, 0, 0, "-01:00", ZONE_GMT )
.add( 1900, 1, 1, 0, 0, 0, 0, "+00:00", ZONE_OSLO )
.add( 1900, 1, 1, 0, 9, 21, 0, "+00:09:21", ZONE_PARIS )
.add( 1900, 1, 1, 0, 19, 32, 0, "+00:19:32", ZONE_PARIS )
.add( 1900, 1, 1, 0, 19, 32, 0, "+00:19:32", ZONE_AMSTERDAM )
.add( 1900, 1, 1, 0, 9, 20, 0, "+00:09:21", ZONE_PARIS )
.add( 1900, 1, 1, 0, 19, 31, 0, "+00:19:32", ZONE_PARIS )
.add( 1900, 1, 1, 0, 19, 31, 0, "+00:19:32", ZONE_AMSTERDAM )
)
.skippedForDialects(
// MySQL/Mariadb cannot store values equal to epoch exactly, or less, in a timestamp.
dialect -> dialect instanceof MySQLDialect || dialect instanceof MariaDBDialect
|| dialect instanceof H2Dialect && ( (H2Dialect) dialect ).hasOddDstBehavior(),
b -> b
// Affected by HHH-13266 (JDK-8061577)
.add( 1892, 1, 1, 0, 0, 0, 0, "+00:00", ZONE_OSLO )
)
.skippedForDialects(
// MySQL/Mariadb/Sybase cannot store dates in 1600 in a timestamp.
dialect -> dialect instanceof MySQLDialect || dialect instanceof MariaDBDialect || dialect instanceof SybaseDialect
|| dialect instanceof H2Dialect && ( (H2Dialect) dialect ).hasOddDstBehavior(),
b -> b
.add( 1600, 1, 1, 0, 0, 0, 0, "+00:19:32", ZONE_AMSTERDAM )
)
// HHH-13379: DST end (where Timestamp becomes ambiguous, see JDK-4312621)
// => This used to work correctly in 5.4.1.Final and earlier
.skippedForDialects(
dialect -> dialect instanceof H2Dialect && ( (H2Dialect) dialect ).hasOddDstBehavior(),
b -> b.add( 2018, 10, 28, 2, 0, 0, 0, "+01:00", ZONE_PARIS )
.add( 2018, 4, 1, 2, 0, 0, 0, "+12:00", ZONE_AUCKLAND )
)
// => This has never worked correctly, unless the JDBC timezone was set to UTC
.withForcedJdbcTimezone( "UTC", b -> b
.add( 2018, 10, 28, 2, 0, 0, 0, "+02:00", ZONE_PARIS )
.add( 2018, 4, 1, 2, 0, 0, 0, "+13:00", ZONE_AUCKLAND )
)
// => Also test DST start, just in case
.add( 2018, 3, 25, 2, 0, 0, 0, "+01:00", ZONE_PARIS )
.add( 2018, 3, 25, 3, 0, 0, 0, "+02:00", ZONE_PARIS )
.add( 2018, 9, 30, 2, 0, 0, 0, "+12:00", ZONE_AUCKLAND )
.add( 2018, 9, 30, 3, 0, 0, 0, "+13:00", ZONE_AUCKLAND )
// => Also test dates around 1905-01-01, because the code behaves differently before and after 1905
.add( 1904, 12, 31, 23, 59, 59, 999_999_999, "-01:00", ZONE_PARIS )
.add( 1904, 12, 31, 23, 59, 59, 999_999_999, "+00:00", ZONE_PARIS )
.add( 1904, 12, 31, 23, 59, 59, 999_999_999, "+01:00", ZONE_PARIS )
.add( 1905, 1, 1, 0, 0, 0, 0, "-01:00", ZONE_PARIS )
.add( 1905, 1, 1, 0, 0, 0, 0, "+00:00", ZONE_PARIS )
.add( 1905, 1, 1, 0, 0, 0, 0, "+01:00", ZONE_PARIS )
.build();
}
private final Parameter<OffsetDateTime,DataImpl> testParam;
public OffsetDateTimeTest(Parameter<OffsetDateTime,DataImpl> testParam) {
super( testParam.env() );
this.testParam = testParam;
}
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder builder) {
builder.applySetting( TIMEZONE_DEFAULT_STORAGE, NORMALIZE );
return super.produceServiceRegistry( builder );
}
@Override
protected Class<EntityWithOffsetDateTime> getEntityType() {
return EntityWithOffsetDateTime.class;
}
@Override
protected EntityWithOffsetDateTime createEntityForHibernateWrite(int id) {
return new EntityWithOffsetDateTime( id, getOriginalOffsetDateTime() );
}
private OffsetDateTime getOriginalOffsetDateTime() {
return testParam.data().makeValue();
}
@Override
protected OffsetDateTime getExpectedPropertyValueAfterHibernateRead() {
return getOriginalOffsetDateTime().atZoneSameInstant( ZoneId.systemDefault() ).toOffsetDateTime();
}
@Override
protected OffsetDateTime getActualPropertyValue(EntityWithOffsetDateTime entity) {
return entity.value;
}
@Override
protected void bindJdbcValue(
PreparedStatement statement,
int parameterIndex,
SessionFactoryScope factoryScope) throws SQLException {
statement.setTimestamp( parameterIndex, getExpectedJdbcValueAfterHibernateWrite() );
}
@Override
protected Timestamp getExpectedJdbcValueAfterHibernateWrite() {
LocalDateTime dateTimeInDefaultTimeZone = getOriginalOffsetDateTime().atZoneSameInstant( ZoneId.systemDefault() )
.toLocalDateTime();
return new Timestamp(
dateTimeInDefaultTimeZone.getYear() - 1900, dateTimeInDefaultTimeZone.getMonthValue() - 1,
dateTimeInDefaultTimeZone.getDayOfMonth(),
dateTimeInDefaultTimeZone.getHour(), dateTimeInDefaultTimeZone.getMinute(),
dateTimeInDefaultTimeZone.getSecond(),
dateTimeInDefaultTimeZone.getNano()
);
}
@Override
protected Object extractJdbcValue(
ResultSet resultSet,
int columnIndex,
SessionFactoryScope factoryScope) throws SQLException {
return resultSet.getTimestamp( columnIndex );
}
@Test
public void testRetrievingEntityByOffsetDateTime(SessionFactoryScope factoryScope) {
Timezones.withDefaultTimeZone( testParam.env(), () -> {
factoryScope.inTransaction( session -> {
session.persist( new EntityWithOffsetDateTime( 1, getOriginalOffsetDateTime() ) );
} );
Consumer<OffsetDateTime> checkOneMatch = expected -> factoryScope.inSession( s -> {
var list = s.createQuery( "from EntityWithOffsetDateTime o where o.value = :date" )
.setParameter( "date", expected, StandardBasicTypes.OFFSET_DATE_TIME )
.list();
MatcherAssert.assertThat( list.size(), is( 1 ) );
} );
checkOneMatch.accept( getOriginalOffsetDateTime() );
checkOneMatch.accept( getExpectedPropertyValueAfterHibernateRead() );
checkOneMatch.accept( getExpectedPropertyValueAfterHibernateRead().withOffsetSameInstant( ZoneOffset.UTC ) );
} );
}
@SuppressWarnings({"FieldCanBeLocal", "unused"})
@Entity(name = "EntityWithOffsetDateTime")
@Table(name = ENTITY_TBL_NAME)
public static | OffsetDateTimeTest |
java | apache__camel | components/camel-micrometer/src/test/java/org/apache/camel/component/micrometer/TimerRouteTest.java | {
"start": 2085,
"end": 5443
} | class ____ extends CamelSpringTestSupport {
private static final long DELAY = 20L;
@EndpointInject("mock:out")
private MockEndpoint endpoint;
@Produce("direct:in-1")
private ProducerTemplate producer1;
@Produce("direct:in-2")
private ProducerTemplate producer2;
@Produce("direct:in-3")
private ProducerTemplate producer3;
@BindToRegistry(METRICS_REGISTRY_NAME)
private MeterRegistry registry = new SimpleMeterRegistry();
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:in-1")
.setHeader(HEADER_METRIC_NAME, constant("B"))
.to("micrometer:timer:A?action=start")
.delay(DELAY)
.setHeader(HEADER_METRIC_NAME, constant("B"))
.to("micrometer:timer:A?action=stop")
.to("mock:out");
from("direct:in-2")
.setHeader(HEADER_TIMER_ACTION, constant(MicrometerTimerAction.start))
.to("micrometer:timer:A")
.delay(DELAY)
.setHeader(HEADER_TIMER_ACTION, constant(MicrometerTimerAction.stop))
.to("micrometer:timer:A")
.to("mock:out");
from("direct:in-3")
.to("micrometer:timer:C?action=start")
.delay(DELAY)
.to("micrometer:timer:C?action=stop&tags.a=${body}")
.to("mock:out");
}
};
}
@Override
protected AbstractApplicationContext createApplicationContext() {
return new AnnotationConfigApplicationContext();
}
@Override
public void doPostTearDown() {
endpoint.reset();
}
@Test
public void testOverrideMetricsName() throws Exception {
Object body = new Object();
endpoint.expectedBodiesReceived(body);
producer1.sendBody(body);
Timer timer = registry.find("B").timer();
assertEquals(1L, timer.count());
assertTrue(timer.max(TimeUnit.MILLISECONDS) > 0.0D);
endpoint.assertIsSatisfied();
}
@Test
public void testOverrideNoAction() throws Exception {
Object body = new Object();
endpoint.expectedBodiesReceived(body);
producer2.sendBody(body);
Timer timer = registry.find("A").timer();
assertEquals(1L, timer.count());
assertTrue(timer.max(TimeUnit.MILLISECONDS) > 0.0D);
endpoint.assertIsSatisfied();
}
@Test
public void testNormal() throws Exception {
int count = 10;
String body = "Hello";
endpoint.expectedMessageCount(count);
for (int i = 0; i < count; i++) {
producer3.sendBody(body);
}
Timer timer = registry.find("C").timer();
assertEquals(count, timer.count());
assertTrue(timer.max(TimeUnit.MILLISECONDS) > DELAY);
assertTrue(timer.mean(TimeUnit.MILLISECONDS) > DELAY);
assertTrue(timer.totalTime(TimeUnit.MILLISECONDS) > DELAY * count);
assertEquals(body, timer.getId().getTag("a"));
endpoint.assertIsSatisfied();
}
}
| TimerRouteTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/CopyFromLocalOperation.java | {
"start": 2951,
"end": 14560
} | class ____ extends ExecutingStoreOperation<Void> {
/**
* Largest N files to be uploaded first.
*/
private static final int LARGEST_N_FILES = 5;
private static final Logger LOG = LoggerFactory.getLogger(
CopyFromLocalOperation.class);
/**
* Callbacks to be used by this operation for external / IO actions.
*/
private final CopyFromLocalOperationCallbacks callbacks;
/**
* Delete source after operation finishes.
*/
private final boolean deleteSource;
/**
* Overwrite destination files / folders.
*/
private final boolean overwrite;
/**
* Source path to file / directory.
*/
private final Path source;
/**
* Async operations executor.
*/
private final ListeningExecutorService executor;
/**
* Destination path.
*/
private Path destination;
/**
* Destination file status.
*/
private FileStatus destStatus;
public CopyFromLocalOperation(
final StoreContext storeContext,
Path source,
Path destination,
boolean deleteSource,
boolean overwrite,
CopyFromLocalOperationCallbacks callbacks) {
super(storeContext);
this.callbacks = callbacks;
this.deleteSource = deleteSource;
this.overwrite = overwrite;
this.source = source.toUri().getScheme() == null ? new Path("file://", source) : source;
this.destination = destination;
// Capacity of 1 is a safe default for now since transfer manager can also
// spawn threads when uploading bigger files.
this.executor = MoreExecutors.listeningDecorator(
storeContext.createThrottledExecutor(1)
);
}
/**
* Executes the {@link CopyFromLocalOperation}.
*
* @throws IOException - if there are any failures with upload or deletion
* of files. Check {@link CopyFromLocalOperationCallbacks} for specifics.
* @throws PathExistsException - if the path exists and no overwrite flag
* is set OR if the source is file and destination is a directory
*/
@Override
@Retries.RetryTranslated
public Void execute()
throws IOException, PathExistsException {
LOG.debug("Copying local file from {} to {}", source, destination);
File sourceFile = callbacks.pathToLocalFile(source);
updateDestStatus(destination);
// Handles bar/ -> foo/ => foo/bar and bar/ -> foo/bar/ => foo/bar/bar
if (getDestStatus().isPresent() && getDestStatus().get().isDirectory()
&& sourceFile.isDirectory()) {
destination = new Path(destination, sourceFile.getName());
LOG.debug("Destination updated to: {}", destination);
updateDestStatus(destination);
}
checkSource(sourceFile);
checkDestination(destination, sourceFile, overwrite);
uploadSourceFromFS();
if (deleteSource) {
callbacks.deleteLocal(source, true);
}
return null;
}
/**
* Does a {@link CopyFromLocalOperationCallbacks#getFileStatus(Path)}
* operation on the provided destination and updates the internal status of
* destStatus field.
*
* @param dest - destination Path
* @throws IOException if getFileStatus fails
*/
private void updateDestStatus(Path dest) throws IOException {
try {
destStatus = callbacks.getFileStatus(dest);
} catch (FileNotFoundException e) {
destStatus = null;
}
}
/**
* Starts async upload operations for files. Creating an empty directory
* classifies as a "file upload".
*
* Check {@link CopyFromLocalOperation} for details on the order of
* operations.
*
* @throws IOException - if listing or upload fail
*/
private void uploadSourceFromFS() throws IOException {
RemoteIterator<LocatedFileStatus> localFiles = listFilesAndDirs(source);
List<CompletableFuture<Void>> activeOps = new ArrayList<>();
// After all files are traversed, this set will contain only emptyDirs
Set<Path> emptyDirs = new HashSet<>();
List<UploadEntry> entries = new ArrayList<>();
while (localFiles.hasNext()) {
LocatedFileStatus sourceFile = localFiles.next();
Path sourceFilePath = sourceFile.getPath();
// Directory containing this file / directory isn't empty
emptyDirs.remove(sourceFilePath.getParent());
if (sourceFile.isDirectory()) {
emptyDirs.add(sourceFilePath);
continue;
}
Path destPath = getFinalPath(sourceFilePath);
// UploadEntries: have a destination path, a file size
entries.add(new UploadEntry(
sourceFilePath,
destPath,
sourceFile.getLen()));
}
if (localFiles instanceof Closeable) {
IOUtils.closeStream((Closeable) localFiles);
}
// Sort all upload entries based on size
entries.sort(new ReverseComparator(new UploadEntry.SizeComparator()));
// Take only top most N entries and upload
final int sortedUploadsCount = Math.min(LARGEST_N_FILES, entries.size());
List<UploadEntry> markedForUpload = new ArrayList<>();
for (int uploadNo = 0; uploadNo < sortedUploadsCount; uploadNo++) {
UploadEntry uploadEntry = entries.get(uploadNo);
File file = callbacks.pathToLocalFile(uploadEntry.source);
activeOps.add(submitUpload(file, uploadEntry));
markedForUpload.add(uploadEntry);
}
// No files found, it's empty source directory
if (entries.isEmpty()) {
emptyDirs.add(source);
}
// Shuffle all remaining entries and upload them
entries.removeAll(markedForUpload);
Collections.shuffle(entries);
for (UploadEntry uploadEntry : entries) {
File file = callbacks.pathToLocalFile(uploadEntry.source);
activeOps.add(submitUpload(file, uploadEntry));
}
for (Path emptyDir : emptyDirs) {
Path emptyDirPath = getFinalPath(emptyDir);
activeOps.add(submitCreateEmptyDir(emptyDirPath));
}
waitForCompletion(activeOps);
}
/**
* Async call to create an empty directory.
*
* @param dir directory path
* @return the submitted future
*/
private CompletableFuture<Void> submitCreateEmptyDir(Path dir) {
return submit(executor, callableWithinAuditSpan(
getAuditSpan(), () -> {
callbacks.createEmptyDir(dir, getStoreContext());
return null;
}
));
}
/**
* Async call to upload a file.
*
* @param file - File to be uploaded
* @param uploadEntry - Upload entry holding the source and destination
* @return the submitted future
*/
private CompletableFuture<Void> submitUpload(
File file,
UploadEntry uploadEntry) {
return submit(executor, callableWithinAuditSpan(
getAuditSpan(), () -> {
callbacks.copyLocalFileFromTo(
file,
uploadEntry.source,
uploadEntry.destination);
return null;
}
));
}
/**
* Checks the source before upload starts.
*
* @param src - Source file
* @throws FileNotFoundException - if the file isn't found
*/
private void checkSource(File src)
throws FileNotFoundException {
if (!src.exists()) {
throw new FileNotFoundException("No file: " + src.getPath());
}
}
/**
* Check the destination path and make sure it's compatible with the source,
* i.e. source and destination are both files / directories.
*
* @param dest - Destination path
* @param src - Source file
* @param overwrite - Should source overwrite destination
* @throws PathExistsException - If the destination path exists and no
* overwrite flag is set
* @throws FileAlreadyExistsException - If source is file and destination is path
*/
private void checkDestination(
Path dest,
File src,
boolean overwrite) throws PathExistsException,
FileAlreadyExistsException {
if (!getDestStatus().isPresent()) {
return;
}
if (src.isDirectory() && getDestStatus().get().isFile()) {
throw new FileAlreadyExistsException(
"Source '" + src.getPath() + "' is directory and " +
"destination '" + dest + "' is file");
}
if (!overwrite) {
throw new PathExistsException(dest + " already exists");
}
}
/**
* Get the final path of a source file with regards to its destination.
*
* @param src - source path
* @return - the final path for the source file to be uploaded to
* @throws PathIOException - if a relative path can't be created
*/
private Path getFinalPath(Path src) throws PathIOException {
URI currentSrcUri = src.toUri();
URI relativeSrcUri = source.toUri().relativize(currentSrcUri);
if (relativeSrcUri.equals(currentSrcUri)) {
throw new PathIOException("Cannot get relative path for URI:"
+ relativeSrcUri);
}
Optional<FileStatus> status = getDestStatus();
if (!relativeSrcUri.getPath().isEmpty()) {
return new Path(destination, relativeSrcUri.getPath());
} else if (status.isPresent() && status.get().isDirectory()) {
// file to dir
return new Path(destination, src.getName());
} else {
// file to file
return destination;
}
}
private Optional<FileStatus> getDestStatus() {
return Optional.ofNullable(destStatus);
}
/**
* {@link RemoteIterator} which lists all of the files and directories for
* a given path. It's strikingly similar to
* {@link org.apache.hadoop.fs.LocalFileSystem#listFiles(Path, boolean)}
* however with the small addition that it includes directories.
*
* @param path - Path to list files and directories from
* @return - an iterator
* @throws IOException - if listing of a path file fails
*/
private RemoteIterator<LocatedFileStatus> listFilesAndDirs(Path path)
throws IOException {
return new RemoteIterator<LocatedFileStatus>() {
private final Stack<RemoteIterator<LocatedFileStatus>> iterators =
new Stack<>();
private RemoteIterator<LocatedFileStatus> current =
callbacks.listLocalStatusIterator(path);
private LocatedFileStatus curFile;
@Override
public boolean hasNext() throws IOException {
while (curFile == null) {
if (current.hasNext()) {
handleFileStat(current.next());
} else if (!iterators.empty()) {
current = iterators.pop();
} else {
return false;
}
}
return true;
}
/**
* Process the input stat.
* If it is a file or directory return the file stat.
* If it is a directory, traverse the directory;
* @param stat input status
* @throws IOException if any IO error occurs
*/
private void handleFileStat(LocatedFileStatus stat)
throws IOException {
if (stat.isFile()) { // file
curFile = stat;
} else { // directory
curFile = stat;
iterators.push(current);
current = callbacks.listLocalStatusIterator(stat.getPath());
}
}
@Override
public LocatedFileStatus next() throws IOException {
if (hasNext()) {
LocatedFileStatus result = curFile;
curFile = null;
return result;
}
throw new NoSuchElementException("No more entry in "
+ path);
}
};
}
/**
* <p>Represents an entry for a file to be moved.</p>
* <p>
* Helpful with sorting files by their size and keeping track of path
* information for the upload.
* </p>
*/
private static final | CopyFromLocalOperation |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/EqualsIncompatibleTypeTest.java | {
"start": 12796,
"end": 12939
} | interface ____<
E extends Entity<E, K, V, V2>,
K extends EntityKey<K>,
V extends Enum<V>,
V2 extends Enum<V2>> {}
| Entity |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/SizeGreaterThanOrEqualsZero.java | {
"start": 2611,
"end": 2706
} | class ____ extends BugChecker implements BinaryTreeMatcher {
private | SizeGreaterThanOrEqualsZero |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/exception/ExceptionUtilsTest.java | {
"start": 3195,
"end": 3562
} | class ____ extends Exception {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unused")
public void getTargetException() {
// noop
}
}
// Temporary classes to allow the nested exception code to be removed
// prior to a rewrite of this test class.
private static final | ExceptionWithoutCause |
java | dropwizard__dropwizard | dropwizard-jetty/src/main/java/io/dropwizard/jetty/HttpConnectorFactory.java | {
"start": 21791,
"end": 22033
} | class ____ extends StringMethodDeserializer<CookieCompliance> {
public CookieComplianceDeserializer() {
super(CookieCompliance.class, CookieCompliance::valueOf);
}
}
private static | CookieComplianceDeserializer |
java | google__dagger | hilt-core/main/java/dagger/hilt/internal/definecomponent/DefineComponentNoParent.java | {
"start": 660,
"end": 784
} | class ____ by {@link dagger.hilt.DefineComponent#parent()} as the default type when no parent
* is given.
*/
public final | used |
java | qos-ch__slf4j | slf4j-reload4j/src/test/java/org/slf4j/reload4j/testHarness/RecursiveAppender.java | {
"start": 1470,
"end": 3039
} | class ____ extends AppenderSkeleton {
int diff = new Random().nextInt();
int activationDelay = 0;
String loggerName = "org.slf4j.impl.RecursiveAppender" + diff;
public List<LoggingEvent> events = new ArrayList<>();
public RecursiveAppender() {
System.out.println("XXXXXXX entering RecursiveAppender constructor");
Logger logger = LoggerFactory.getLogger(loggerName);
logger.info("Calling a logger in the constructor");
System.out.println("exiting RecursiveAppender constructor");
}
protected void append(LoggingEvent e) {
events.add(e);
}
public void close() {
}
public boolean requiresLayout() {
return false;
}
@Override
public void activateOptions() {
System.out.println("entering RecursiveAppender.activateOptions");
if (activationDelay > 0) {
Logger logger = LoggerFactory.getLogger(loggerName);
logger.info("About to wait {} millis", activationDelay);
try {
Thread.sleep(activationDelay);
} catch (InterruptedException e) {
e.printStackTrace();
}
logger.info("Done waiting {} millis", activationDelay);
}
super.activateOptions();
System.out.println("exiting RecursiveAppender.activateOptions");
}
public int getActivationDelay() {
return activationDelay;
}
public void setActivationDelay(int activationDelay) {
this.activationDelay = activationDelay;
}
}
| RecursiveAppender |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java | {
"start": 22459,
"end": 23902
} | class ____ extends S3GuardTool {
public static final String NAME = "uploads";
public static final String ABORT = "abort";
public static final String LIST = "list";
public static final String EXPECT = "expect";
public static final String FORCE = "force";
public static final String PURPOSE = "list or abort pending " +
"multipart uploads";
private static final String USAGE = NAME + " [OPTIONS] " +
"s3a://BUCKET[/path]\n"
+ "\t" + PURPOSE + "\n\n"
+ "Common options:\n"
+ " (-" + LIST + " | -" + EXPECT + " <num-uploads> | -" + ABORT
+ ") [-" + VERBOSE + "] "
+ "[<age-options>] [-force]\n"
+ "\t - Under given path, list or delete all uploads," +
" or only those \n"
+ "older than specified by <age-options>\n"
+ "<age-options> are any combination of the integer-valued options:\n"
+ "\t" + AGE_OPTIONS_USAGE + "\n"
+ "-" + EXPECT + " is similar to list, except no output is printed,\n"
+ "\tbut the exit code will be an error if the provided number\n"
+ "\tis different that the number of uploads found by the command.\n"
+ "-" + FORCE + " option prevents the \"Are you sure\" prompt when\n"
+ "\tusing -" + ABORT;
/** Constant used for output and parsed by tests. */
public static final String TOTAL = "Total";
/** Runs in one of three modes. */
private | Uploads |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/web/access/expression/WebExpressionVoter.java | {
"start": 1522,
"end": 3518
} | class ____ implements AccessDecisionVoter<FilterInvocation> {
private final Log logger = LogFactory.getLog(getClass());
private SecurityExpressionHandler<FilterInvocation> expressionHandler = new DefaultWebSecurityExpressionHandler();
@Override
public int vote(Authentication authentication, FilterInvocation filterInvocation,
Collection<ConfigAttribute> attributes) {
Assert.notNull(authentication, "authentication must not be null");
Assert.notNull(filterInvocation, "filterInvocation must not be null");
Assert.notNull(attributes, "attributes must not be null");
WebExpressionConfigAttribute webExpressionConfigAttribute = findConfigAttribute(attributes);
if (webExpressionConfigAttribute == null) {
this.logger
.trace("Abstained since did not find a config attribute of instance WebExpressionConfigAttribute");
return ACCESS_ABSTAIN;
}
EvaluationContext ctx = webExpressionConfigAttribute.postProcess(
this.expressionHandler.createEvaluationContext(authentication, filterInvocation), filterInvocation);
boolean granted = ExpressionUtils.evaluateAsBoolean(webExpressionConfigAttribute.getAuthorizeExpression(), ctx);
if (granted) {
return ACCESS_GRANTED;
}
this.logger.trace("Voted to deny authorization");
return ACCESS_DENIED;
}
private @Nullable WebExpressionConfigAttribute findConfigAttribute(Collection<ConfigAttribute> attributes) {
for (ConfigAttribute attribute : attributes) {
if (attribute instanceof WebExpressionConfigAttribute) {
return (WebExpressionConfigAttribute) attribute;
}
}
return null;
}
@Override
public boolean supports(ConfigAttribute attribute) {
return attribute instanceof WebExpressionConfigAttribute;
}
@Override
public boolean supports(Class<?> clazz) {
return FilterInvocation.class.isAssignableFrom(clazz);
}
public void setExpressionHandler(SecurityExpressionHandler<FilterInvocation> expressionHandler) {
this.expressionHandler = expressionHandler;
}
}
| WebExpressionVoter |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/geo/SpatialPointTests.java | {
"start": 3582,
"end": 3621
} | class ____.
*/
private static | name |
java | apache__kafka | trogdor/src/main/java/org/apache/kafka/trogdor/rest/CoordinatorShutdownRequest.java | {
"start": 1017,
"end": 1344
} | class ____ extends Message {
private final boolean stopAgents;
@JsonCreator
public CoordinatorShutdownRequest(@JsonProperty("stopAgents") boolean stopAgents) {
this.stopAgents = stopAgents;
}
@JsonProperty
public boolean stopAgents() {
return stopAgents;
}
}
| CoordinatorShutdownRequest |
java | quarkusio__quarkus | integration-tests/test-extension/tests/src/test/java/io/quarkus/it/testsupport/commandmode/QuarkusMainTestWithTestProfileTestCase.java | {
"start": 598,
"end": 856
} | class ____ {
@Test
@Launch(value = {})
public void testLaunchCommand(LaunchResult result) {
assertThat(result.getOutput())
.contains("The bean is mocked value");
}
public static | QuarkusMainTestWithTestProfileTestCase |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java | {
"start": 2133,
"end": 4709
} | class ____ {
private static final Logger LOG = WebHdfsHandler.LOG;
static DefaultFullHttpResponse exceptionCaught(Throwable cause) {
Exception e = cause instanceof Exception ? (Exception) cause : new Exception(cause);
if (LOG.isTraceEnabled()) {
LOG.trace("GOT EXCEPTION", e);
}
//Convert exception
if (e instanceof ParamException) {
final ParamException paramexception = (ParamException)e;
e = new IllegalArgumentException("Invalid value for webhdfs parameter \""
+ paramexception.getParameterName() + "\": "
+ e.getCause().getMessage(), e);
} else if (e instanceof ContainerException || e instanceof SecurityException) {
e = toCause(e);
} else if (e instanceof RemoteException) {
e = ((RemoteException)e).unwrapRemoteException();
}
//Map response status
final HttpResponseStatus s;
if (e instanceof SecurityException) {
s = FORBIDDEN;
} else if (e instanceof AuthorizationException) {
s = FORBIDDEN;
} else if (e instanceof FileNotFoundException) {
s = NOT_FOUND;
} else if (e instanceof IOException) {
s = FORBIDDEN;
} else if (e instanceof UnsupportedOperationException) {
s = BAD_REQUEST;
} else if (e instanceof IllegalArgumentException) {
s = BAD_REQUEST;
} else {
LOG.warn("INTERNAL_SERVER_ERROR", e);
s = INTERNAL_SERVER_ERROR;
}
final byte[] js = JsonUtil.toJsonString(e).getBytes(StandardCharsets.UTF_8);
DefaultFullHttpResponse resp =
new DefaultFullHttpResponse(HTTP_1_1, s, Unpooled.wrappedBuffer(js));
resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
resp.headers().set(CONTENT_LENGTH, js.length);
return resp;
}
private static Exception toCause(Exception e) {
final Throwable t = e.getCause();
if (e instanceof SecurityException) {
// For the issue reported in HDFS-6475, if SecurityException's cause
// is InvalidToken, and the InvalidToken's cause is StandbyException,
// return StandbyException; Otherwise, leave the exception as is,
// since they are handled elsewhere. See HDFS-6588.
if (t != null && t instanceof SecretManager.InvalidToken) {
final Throwable t1 = t.getCause();
if (t1 != null && t1 instanceof StandbyException) {
e = (StandbyException)t1;
}
}
} else {
if (t != null && t instanceof Exception) {
e = (Exception)t;
}
}
return e;
}
}
| ExceptionHandler |
java | quarkusio__quarkus | independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/config/RegistryNonPlatformExtensionsConfig.java | {
"start": 187,
"end": 434
} | interface ____ extends RegistryArtifactConfig {
@Override
default RegistryNonPlatformExtensionsConfig.Mutable mutable() {
return new RegistryNonPlatformExtensionsConfigImpl.Builder(this);
}
| RegistryNonPlatformExtensionsConfig |
java | spring-projects__spring-framework | spring-aop/src/test/java/org/springframework/aop/aspectj/annotation/AspectProxyFactoryTests.java | {
"start": 5275,
"end": 5383
} | enum ____ implements MyInterface {
C, D
}
@Aspect
@SuppressWarnings("serial")
public static | MyOtherEnum |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/core/v2/metadata/InstanceMetadata.java | {
"start": 844,
"end": 1742
} | class ____ implements Serializable {
private static final long serialVersionUID = -8477858617353459226L;
/**
* instance weight.
*/
private double weight = 1.0D;
/**
* If instance is enabled to accept request.
*/
private boolean enabled = true;
private Map<String, Object> extendData = new ConcurrentHashMap<>(1);
public double getWeight() {
return weight;
}
public void setWeight(double weight) {
this.weight = weight;
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public Map<String, Object> getExtendData() {
return extendData;
}
public void setExtendData(Map<String, Object> extendData) {
this.extendData = extendData;
}
}
| InstanceMetadata |
java | quarkusio__quarkus | integration-tests/reactive-messaging-kafka/src/main/java/io/quarkus/it/kafka/FruitProducer.java | {
"start": 206,
"end": 619
} | class ____ {
@Outgoing("fruits-out")
public Multi<Fruit> generateFruits() {
return Multi.createFrom().items(
Fruit.Fruits.BERRY.create("strawberry"),
Fruit.Fruits.POME.create("apple"),
Fruit.Fruits.TROPICAL.create("banana"),
Fruit.Fruits.STONE.create("peach"),
Fruit.Fruits.CITRUS.create("orange"));
}
}
| FruitProducer |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/resource/ResourceLocatorSubresource.java | {
"start": 420,
"end": 3024
} | class ____ {
private static final Logger LOG = Logger.getLogger(ResourceLocatorSubresource.class);
@GET
public String doGet(@Context UriInfo uri) {
LOG.info("Uri Ancesstors for Subresource.doGet():");
List<String> matchedURIs = uri.getMatchedURIs();
Assertions.assertEquals(2, matchedURIs.size());
Assertions.assertEquals("base/1/resources", matchedURIs.get(0));
Assertions.assertEquals("", matchedURIs.get(1));
for (String ancestor : matchedURIs)
LOG.debug(" " + ancestor);
LOG.info("Uri Ancestors Object for Subresource.doGet():");
Assertions.assertEquals(2, uri.getMatchedResources().size());
Assertions.assertEquals(ResourceLocatorSubresource.class, uri.getMatchedResources().get(0).getClass());
Assertions.assertEquals(ResourceLocatorBaseResource.class, uri.getMatchedResources().get(1).getClass());
for (Object ancestor : uri.getMatchedResources())
LOG.debugv(" {0}", ancestor.getClass().getName());
return this.getClass().getName();
}
@Path("/subresource2")
public Object getSubresource2(@Context UriInfo uri) {
LOG.info("Uri Ancestors for Subresource.getSubresource2():");
List<String> matchedURIs = uri.getMatchedURIs();
Assertions.assertEquals(3, matchedURIs.size());
Assertions.assertEquals("base/1/resources/subresource2", matchedURIs.get(0));
Assertions.assertEquals("base/1/resources", matchedURIs.get(1));
Assertions.assertEquals("", matchedURIs.get(2));
for (String ancestor : matchedURIs)
LOG.debug(" " + ancestor);
LOG.info("Uri Ancestors Object for Subresource.getSubresource2():");
Assertions.assertEquals(2, uri.getMatchedResources().size());
Assertions.assertEquals(ResourceLocatorSubresource.class, uri.getMatchedResources().get(0).getClass());
Assertions.assertEquals(ResourceLocatorBaseResource.class, uri.getMatchedResources().get(1).getClass());
for (Object ancestor : uri.getMatchedResources())
LOG.debugv(" {0}", ancestor.getClass().getName());
return new ResourceLocatorSubresource2();
}
@GET
@Path("/subresource3")
public String getValueFromBeanParam(@BeanParam Params params) {
return params.param + " and " + params.value;
}
@GET
@Path("/subresource4")
public String getValueFromGenericBeanParam(@BeanParam ParamsWithGeneric<String> params) {
return params.param + " and " + params.value;
}
public static | ResourceLocatorSubresource |
java | apache__camel | components/camel-lucene/src/main/java/org/apache/camel/component/lucene/LuceneIndexProducer.java | {
"start": 972,
"end": 1868
} | class ____ extends DefaultProducer {
LuceneConfiguration config;
LuceneIndexer indexer;
public LuceneIndexProducer(Endpoint endpoint, LuceneConfiguration config, LuceneIndexer indexer) {
super(endpoint);
this.config = config;
this.indexer = indexer;
}
@Override
public void doStop() throws Exception {
this.indexer.getNiofsDirectory().close();
super.doStop();
}
@Override
public void process(Exchange exchange) throws Exception {
indexer.index(exchange);
}
public LuceneConfiguration getConfig() {
return config;
}
public void setConfig(LuceneConfiguration config) {
this.config = config;
}
public LuceneIndexer getIndexer() {
return indexer;
}
public void setIndexer(LuceneIndexer indexer) {
this.indexer = indexer;
}
}
| LuceneIndexProducer |
java | apache__camel | components/camel-datasonnet/src/main/java/org/apache/camel/language/datasonnet/DatasonnetConstants.java | {
"start": 863,
"end": 1100
} | class ____ {
public static final String BODY_MEDIATYPE = "CamelDatasonnetBodyMediaType";
public static final String OUTPUT_MEDIATYPE = "CamelDatasonnetOutputMediaType";
private DatasonnetConstants() {
}
}
| DatasonnetConstants |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ImmutableMemberCollectionTest.java | {
"start": 5686,
"end": 6284
} | class ____ {
private static final Set<String> mySet;
static {
mySet = new HashSet<>();
}
private static void myFunc() {
mySet.add("myString");
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void setInit_mutableTypeInStaticBlock_passedToAnotherFunction_doesNothing() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.Set;
import java.util.HashSet;
| Test |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvtVO/ae/Data.java | {
"start": 112,
"end": 159
} | class ____ {
public List<Area> areaList;
}
| Data |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestEnhancedByteBufferAccess.java | {
"start": 3888,
"end": 12041
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestEnhancedByteBufferAccess.class.getName());
static private TemporarySocketDirectory sockDir;
static private CacheManipulator prevCacheManipulator;
@BeforeAll
public static void init() {
sockDir = new TemporarySocketDirectory();
DomainSocket.disableBindPathValidation();
prevCacheManipulator = NativeIO.POSIX.getCacheManipulator();
NativeIO.POSIX.setCacheManipulator(new CacheManipulator() {
@Override
public void mlock(String identifier,
ByteBuffer mmap, long length) throws IOException {
LOG.info("mlocking " + identifier);
}
});
}
@AfterAll
public static void teardown() {
// Restore the original CacheManipulator
NativeIO.POSIX.setCacheManipulator(prevCacheManipulator);
}
private static byte[] byteBufferToArray(ByteBuffer buf) {
byte resultArray[] = new byte[buf.remaining()];
buf.get(resultArray);
buf.flip();
return resultArray;
}
private static final int BLOCK_SIZE =
(int) NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize();
public static HdfsConfiguration initZeroCopyTest() {
assumeTrue(NativeIO.isAvailable());
assumeTrue(SystemUtils.IS_OS_UNIX);
HdfsConfiguration conf = new HdfsConfiguration();
conf.setBoolean(HdfsClientConfigKeys.Read.ShortCircuit.KEY, true);
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
conf.setInt(HdfsClientConfigKeys.Mmap.CACHE_SIZE_KEY, 3);
conf.setLong(HdfsClientConfigKeys.Mmap.CACHE_TIMEOUT_MS_KEY, 100);
conf.set(DFSConfigKeys.DFS_DOMAIN_SOCKET_PATH_KEY,
new File(sockDir.getDir(),
"TestRequestMmapAccess._PORT.sock").getAbsolutePath());
conf.setBoolean(HdfsClientConfigKeys.Read.ShortCircuit.SKIP_CHECKSUM_KEY,
true);
conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setLong(DFS_CACHEREPORT_INTERVAL_MSEC_KEY, 1000);
conf.setLong(DFS_NAMENODE_PATH_BASED_CACHE_REFRESH_INTERVAL_MS, 1000);
return conf;
}
@Test
public void testZeroCopyReads() throws Exception {
HdfsConfiguration conf = initZeroCopyTest();
MiniDFSCluster cluster = null;
final Path TEST_PATH = new Path("/a");
FSDataInputStream fsIn = null;
final int TEST_FILE_LENGTH = 3 * BLOCK_SIZE;
FileSystem fs = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
fs = cluster.getFileSystem();
DFSTestUtil.createFile(fs, TEST_PATH,
TEST_FILE_LENGTH, (short)1, 7567L);
try {
DFSTestUtil.waitReplication(fs, TEST_PATH, (short)1);
} catch (InterruptedException e) {
fail("unexpected InterruptedException during " +
"waitReplication: " + e);
} catch (TimeoutException e) {
fail("unexpected TimeoutException during " +
"waitReplication: " + e);
}
fsIn = fs.open(TEST_PATH);
byte original[] = new byte[TEST_FILE_LENGTH];
IOUtils.readFully(fsIn, original, 0, TEST_FILE_LENGTH);
fsIn.close();
fsIn = fs.open(TEST_PATH);
ByteBuffer result = fsIn.read(null, BLOCK_SIZE,
EnumSet.of(ReadOption.SKIP_CHECKSUMS));
assertEquals(BLOCK_SIZE, result.remaining());
HdfsDataInputStream dfsIn = (HdfsDataInputStream)fsIn;
assertEquals(BLOCK_SIZE,
dfsIn.getReadStatistics().getTotalBytesRead());
assertEquals(BLOCK_SIZE,
dfsIn.getReadStatistics().getTotalZeroCopyBytesRead());
assertArrayEquals(Arrays.copyOfRange(original, 0, BLOCK_SIZE),
byteBufferToArray(result));
fsIn.releaseBuffer(result);
} finally {
if (fsIn != null) fsIn.close();
if (fs != null) fs.close();
if (cluster != null) cluster.shutdown();
}
}
@Test
public void testShortZeroCopyReads() throws Exception {
HdfsConfiguration conf = initZeroCopyTest();
MiniDFSCluster cluster = null;
final Path TEST_PATH = new Path("/a");
FSDataInputStream fsIn = null;
final int TEST_FILE_LENGTH = 3 * BLOCK_SIZE;
FileSystem fs = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
fs = cluster.getFileSystem();
DFSTestUtil.createFile(fs, TEST_PATH, TEST_FILE_LENGTH, (short)1, 7567L);
try {
DFSTestUtil.waitReplication(fs, TEST_PATH, (short)1);
} catch (InterruptedException e) {
fail("unexpected InterruptedException during " +
"waitReplication: " + e);
} catch (TimeoutException e) {
fail("unexpected TimeoutException during " +
"waitReplication: " + e);
}
fsIn = fs.open(TEST_PATH);
byte original[] = new byte[TEST_FILE_LENGTH];
IOUtils.readFully(fsIn, original, 0, TEST_FILE_LENGTH);
fsIn.close();
fsIn = fs.open(TEST_PATH);
// Try to read (2 * ${BLOCK_SIZE}), but only get ${BLOCK_SIZE} because of the block size.
HdfsDataInputStream dfsIn = (HdfsDataInputStream)fsIn;
ByteBuffer result =
dfsIn.read(null, 2 * BLOCK_SIZE, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
assertEquals(BLOCK_SIZE, result.remaining());
assertEquals(BLOCK_SIZE,
dfsIn.getReadStatistics().getTotalBytesRead());
assertEquals(BLOCK_SIZE,
dfsIn.getReadStatistics().getTotalZeroCopyBytesRead());
assertArrayEquals(Arrays.copyOfRange(original, 0, BLOCK_SIZE),
byteBufferToArray(result));
dfsIn.releaseBuffer(result);
// Try to read (1 + ${BLOCK_SIZE}), but only get ${BLOCK_SIZE} because of the block size.
result =
dfsIn.read(null, 1 + BLOCK_SIZE, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
assertEquals(BLOCK_SIZE, result.remaining());
assertArrayEquals(Arrays.copyOfRange(original, BLOCK_SIZE, 2 * BLOCK_SIZE),
byteBufferToArray(result));
dfsIn.releaseBuffer(result);
} finally {
if (fsIn != null) fsIn.close();
if (fs != null) fs.close();
if (cluster != null) cluster.shutdown();
}
}
@Test
public void testZeroCopyReadsNoFallback() throws Exception {
HdfsConfiguration conf = initZeroCopyTest();
MiniDFSCluster cluster = null;
final Path TEST_PATH = new Path("/a");
FSDataInputStream fsIn = null;
final int TEST_FILE_LENGTH = 3 * BLOCK_SIZE;
FileSystem fs = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
fs = cluster.getFileSystem();
DFSTestUtil.createFile(fs, TEST_PATH,
TEST_FILE_LENGTH, (short)1, 7567L);
try {
DFSTestUtil.waitReplication(fs, TEST_PATH, (short)1);
} catch (InterruptedException e) {
fail("unexpected InterruptedException during " +
"waitReplication: " + e);
} catch (TimeoutException e) {
fail("unexpected TimeoutException during " +
"waitReplication: " + e);
}
fsIn = fs.open(TEST_PATH);
byte original[] = new byte[TEST_FILE_LENGTH];
IOUtils.readFully(fsIn, original, 0, TEST_FILE_LENGTH);
fsIn.close();
fsIn = fs.open(TEST_PATH);
HdfsDataInputStream dfsIn = (HdfsDataInputStream)fsIn;
ByteBuffer result;
try {
result = dfsIn.read(null, BLOCK_SIZE + 1, EnumSet.noneOf(ReadOption.class));
fail("expected UnsupportedOperationException");
} catch (UnsupportedOperationException e) {
// expected
}
result = dfsIn.read(null, BLOCK_SIZE, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
assertEquals(BLOCK_SIZE, result.remaining());
assertEquals(BLOCK_SIZE,
dfsIn.getReadStatistics().getTotalBytesRead());
assertEquals(BLOCK_SIZE,
dfsIn.getReadStatistics().getTotalZeroCopyBytesRead());
assertArrayEquals(Arrays.copyOfRange(original, 0, BLOCK_SIZE),
byteBufferToArray(result));
} finally {
if (fsIn != null) fsIn.close();
if (fs != null) fs.close();
if (cluster != null) cluster.shutdown();
}
}
private static | TestEnhancedByteBufferAccess |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/config/GatewayAutoConfigurationTests.java | {
"start": 23994,
"end": 24190
} | class ____ {
@Bean
ReactiveOAuth2AuthorizedClientManager myReactiveOAuth2AuthorizedClientManager() {
return authorizeRequest -> null;
}
}
}
| TestReactiveOAuth2AuthorizedClientManagerConfig |
java | elastic__elasticsearch | x-pack/plugin/mapper-exponential-histogram/src/main/java/org/elasticsearch/xpack/exponentialhistogram/aggregations/bucket/histogram/ExponentialHistogramBackedHistogramAggregator.java | {
"start": 1583,
"end": 4839
} | class ____ extends AbstractHistogramAggregator {
private final ExponentialHistogramValuesSource.ExponentialHistogram valuesSource;
public ExponentialHistogramBackedHistogramAggregator(
String name,
AggregatorFactories factories,
double interval,
double offset,
BucketOrder order,
boolean keyed,
long minDocCount,
DoubleBounds extendedBounds,
DoubleBounds hardBounds,
ValuesSourceConfig valuesSourceConfig,
AggregationContext context,
Aggregator parent,
CardinalityUpperBound cardinalityUpperBound,
Map<String, Object> metadata
) throws IOException {
super(
name,
factories,
interval,
offset,
order,
keyed,
minDocCount,
extendedBounds,
hardBounds,
valuesSourceConfig.format(),
context,
parent,
cardinalityUpperBound,
metadata
);
this.valuesSource = (ExponentialHistogramValuesSource.ExponentialHistogram) valuesSourceConfig.getValuesSource();
// Sub aggregations are not allowed when running histogram agg over histograms
if (subAggregators().length > 0) {
throw new IllegalArgumentException("Histogram aggregation on histogram fields does not support sub-aggregations");
}
}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final LeafBucketCollector sub) throws IOException {
ExponentialHistogramValuesReader values = valuesSource.getHistogramValues(aggCtx.getLeafReaderContext());
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long owningBucketOrd) throws IOException {
if (values.advanceExact(doc)) {
ExponentialHistogram histo = values.histogramValue();
forEachBucketCenter(histo, (center, count) -> {
double key = Math.floor((center - offset) / interval);
if (hardBounds == null || hardBounds.contain(key * interval)) {
long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
if (bucketOrd < 0) { // already seen
bucketOrd = -1 - bucketOrd;
collectExistingBucket(sub, doc, bucketOrd);
} else {
collectBucket(sub, doc, bucketOrd);
}
// We have added the document already and we have incremented bucket doc_count
// by _doc_count times. To compensate for this, we should increment doc_count by
// (count - _doc_count) so that we have added it count times.
incrementBucketDocCount(bucketOrd, count - docCountProvider.getDocCount(doc));
}
});
}
}
};
}
@FunctionalInterface
private | ExponentialHistogramBackedHistogramAggregator |
java | apache__camel | components/camel-leveldb/src/test/java/org/apache/camel/component/leveldb/LevelDBExchangeSerializationTest.java | {
"start": 1479,
"end": 3696
} | class ____ extends LevelDBTestSupport {
private LevelDBFile levelDBFile;
@Override
public void doPostSetup() {
deleteDirectory("target/data");
File file = new File("target/data/leveldb.dat");
levelDBFile = new LevelDBFile();
levelDBFile.setFile(file);
levelDBFile.start();
}
@Override
public void doPostTearDown() {
levelDBFile.stop();
}
@Test
public void testExchangeSerialization() {
LevelDBAggregationRepository repo = getRepo();
repo.setLevelDBFile(levelDBFile);
repo.setRepositoryName("repo1");
Exchange exchange = new DefaultExchange(context);
exchange.getIn().setBody("Hello World");
exchange.getIn().setHeader("name", "Claus");
exchange.getIn().setHeader("number", 123);
exchange.setProperty("quote", "Camel rocks");
Date now = new Date();
exchange.getIn().setHeader("date", now);
repo.add(context, "foo", exchange);
Exchange actual = repo.get(context, "foo");
assertEquals("Hello World", actual.getIn().getBody());
assertEquals("Claus", actual.getIn().getHeader("name"));
assertEquals(123, actual.getIn().getHeader("number"));
Date date = actual.getIn().getHeader("date", Date.class);
assertNotNull(date);
assertEquals(now.getTime(), date.getTime());
// we do not serialize properties to avoid storing all kind of not needed information
assertNull(actual.getProperty("quote"));
assertSame(context, actual.getContext());
// change something
exchange.getIn().setBody("Bye World");
exchange.getIn().setHeader("name", "Hiram");
exchange.getIn().removeHeader("date");
repo.add(context, "foo", exchange);
actual = repo.get(context, "foo");
assertEquals("Bye World", actual.getIn().getBody());
assertEquals("Hiram", actual.getIn().getHeader("name"));
assertEquals(123, actual.getIn().getHeader("number"));
date = actual.getIn().getHeader("date", Date.class);
assertNull(date);
assertSame(context, actual.getContext());
}
}
| LevelDBExchangeSerializationTest |
java | apache__hadoop | hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java | {
"start": 1004,
"end": 1943
} | class ____ {
long size;
NfsTime mtime; // in milliseconds
NfsTime ctime; // in milliseconds
public long getSize() {
return size;
}
public NfsTime getMtime() {
return mtime;
}
public NfsTime getCtime() {
return ctime;
}
public WccAttr() {
this.size = 0;
mtime = null;
ctime = null;
}
public WccAttr(long size, NfsTime mtime, NfsTime ctime) {
this.size = size;
this.mtime = mtime;
this.ctime = ctime;
}
public static WccAttr deserialize(XDR xdr) {
long size = xdr.readHyper();
NfsTime mtime = NfsTime.deserialize(xdr);
NfsTime ctime = NfsTime.deserialize(xdr);
return new WccAttr(size, mtime, ctime);
}
public void serialize(XDR out) {
out.writeLongAsHyper(size);
if (mtime == null) {
mtime = new NfsTime(0);
}
mtime.serialize(out);
if (ctime == null) {
ctime = new NfsTime(0);
}
ctime.serialize(out);
}
} | WccAttr |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/datastreams/UpdateDataStreamSettingsActionResponseTests.java | {
"start": 1403,
"end": 11318
} | class ____ extends AbstractWireSerializingTestCase<UpdateDataStreamSettingsAction.Response> {
@Override
protected Writeable.Reader<UpdateDataStreamSettingsAction.Response> instanceReader() {
return UpdateDataStreamSettingsAction.Response::new;
}
public void testToXContent() throws IOException {
Map<String, String> dataStream1Settings = Map.of("setting1", "value1", "setting2", "value2");
Map<String, String> dataStream1EffectiveSettings = Map.of("setting1", "value1", "setting2", "value2", "setting3", "value3");
List<String> dataStream1AppliedToDataStreamOnly = randomList(10, () -> randomAlphanumericOfLength(10));
List<String> dataStream1AppliedToWriteIndexOnly = randomList(10, () -> randomAlphanumericOfLength(10));
List<String> dataStream1AppliedToBackingIndices = randomList(10, () -> randomAlphanumericOfLength(10));
List<IndexSettingError> dataStream1IndexErrors = randomList(
10,
() -> new IndexSettingError(randomAlphanumericOfLength(10), randomAlphaOfLength(10))
);
Map<String, String> dataStream2Settings = Map.of("setting4", "value4", "setting5", "value5");
Map<String, String> dataStream2EffectiveSettings = Map.of("setting4", "value4", "setting5", "value5", "settings6", "value6");
List<String> dataStream2AppliedToDataStreamOnly = randomList(10, () -> randomAlphanumericOfLength(10));
List<String> dataStream2AppliedToWriteIndexOnly = randomList(10, () -> randomAlphanumericOfLength(10));
List<String> dataStream2AppliedToBackingIndices = randomList(10, () -> randomAlphanumericOfLength(10));
List<IndexSettingError> dataStream2IndexErrors = randomList(
10,
() -> new IndexSettingError(randomAlphanumericOfLength(10), randomAlphaOfLength(10))
);
boolean dataStream1Succeeded = randomBoolean();
String dataStream1Error = randomBoolean() ? null : randomAlphaOfLength(20);
boolean dataStream2Succeeded = randomBoolean();
String dataStream2Error = randomBoolean() ? null : randomAlphaOfLength(20);
UpdateDataStreamSettingsAction.DataStreamSettingsResponse dataStreamSettingsResponse1 =
new UpdateDataStreamSettingsAction.DataStreamSettingsResponse(
"dataStream1",
dataStream1Succeeded,
dataStream1Error,
Settings.builder().loadFromMap(dataStream1Settings).build(),
Settings.builder().loadFromMap(dataStream1EffectiveSettings).build(),
new UpdateDataStreamSettingsAction.DataStreamSettingsResponse.IndicesSettingsResult(
dataStream1AppliedToDataStreamOnly,
dataStream1AppliedToWriteIndexOnly,
dataStream1AppliedToBackingIndices,
dataStream1IndexErrors
)
);
UpdateDataStreamSettingsAction.DataStreamSettingsResponse dataStreamSettingsResponse2 =
new UpdateDataStreamSettingsAction.DataStreamSettingsResponse(
"dataStream2",
dataStream2Succeeded,
dataStream2Error,
Settings.builder().loadFromMap(dataStream2Settings).build(),
Settings.builder().loadFromMap(dataStream2EffectiveSettings).build(),
new UpdateDataStreamSettingsAction.DataStreamSettingsResponse.IndicesSettingsResult(
dataStream2AppliedToDataStreamOnly,
dataStream2AppliedToWriteIndexOnly,
dataStream2AppliedToBackingIndices,
dataStream2IndexErrors
)
);
List<UpdateDataStreamSettingsAction.DataStreamSettingsResponse> responseList = List.of(
dataStreamSettingsResponse1,
dataStreamSettingsResponse2
);
UpdateDataStreamSettingsAction.Response response = new UpdateDataStreamSettingsAction.Response(responseList);
try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
builder.humanReadable(true);
response.toXContentChunked(ToXContent.EMPTY_PARAMS).forEachRemaining(xcontent -> {
try {
xcontent.toXContent(builder, EMPTY_PARAMS);
} catch (IOException e) {
fail(e);
}
});
Map<String, Object> xContentMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
assertThat(
xContentMap,
equalTo(
Map.of(
"data_streams",
List.of(
buildExpectedMap(
"dataStream1",
dataStream1Succeeded,
dataStream1Error,
dataStream1Settings,
dataStream1EffectiveSettings,
dataStream1AppliedToDataStreamOnly,
dataStream1AppliedToWriteIndexOnly,
dataStream1AppliedToBackingIndices,
dataStream1IndexErrors
),
buildExpectedMap(
"dataStream2",
dataStream2Succeeded,
dataStream2Error,
dataStream2Settings,
dataStream2EffectiveSettings,
dataStream2AppliedToDataStreamOnly,
dataStream2AppliedToWriteIndexOnly,
dataStream2AppliedToBackingIndices,
dataStream2IndexErrors
)
)
)
)
);
}
}
private Map<String, Object> buildExpectedMap(
String name,
boolean succeeded,
String error,
Map<String, String> settings,
Map<String, String> effectiveSettings,
List<String> appliedToDataStreamOnly,
List<String> appliedToWriteIndexOnly,
List<String> appliedToIndices,
List<IndexSettingError> indexErrors
) {
Map<String, Object> result = new HashMap<>();
result.put("name", name);
result.put("applied_to_data_stream", succeeded);
if (error != null) {
result.put("error", error);
}
result.put("settings", settings);
result.put("effective_settings", effectiveSettings);
Map<String, Object> indexSettingsResults = new HashMap<>();
indexSettingsResults.put("applied_to_data_stream_only", appliedToDataStreamOnly);
indexSettingsResults.put("applied_to_data_stream_and_write_indices", appliedToWriteIndexOnly);
indexSettingsResults.put("applied_to_data_stream_and_backing_indices", appliedToIndices);
if (indexErrors.isEmpty() == false) {
indexSettingsResults.put(
"errors",
indexErrors.stream()
.map(indexSettingError -> Map.of("index", indexSettingError.indexName(), "error", indexSettingError.errorMessage()))
.toList()
);
}
result.put("index_settings_results", indexSettingsResults);
return result;
}
@Override
protected UpdateDataStreamSettingsAction.Response createTestInstance() {
return new UpdateDataStreamSettingsAction.Response(randomList(10, this::randomDataStreamSettingsResponse));
}
private UpdateDataStreamSettingsAction.DataStreamSettingsResponse randomDataStreamSettingsResponse() {
return new UpdateDataStreamSettingsAction.DataStreamSettingsResponse(
"dataStream1",
randomBoolean(),
randomBoolean() ? null : randomAlphaOfLength(20),
randomSettings(),
randomSettings(),
randomIndicesSettingsResult()
);
}
private UpdateDataStreamSettingsAction.DataStreamSettingsResponse.IndicesSettingsResult randomIndicesSettingsResult() {
return new UpdateDataStreamSettingsAction.DataStreamSettingsResponse.IndicesSettingsResult(
randomList(10, () -> randomAlphanumericOfLength(20)),
randomList(10, () -> randomAlphanumericOfLength(20)),
randomList(10, () -> randomAlphanumericOfLength(20)),
randomList(10, this::randomIndexSettingError)
);
}
private IndexSettingError randomIndexSettingError() {
return new IndexSettingError(randomAlphanumericOfLength(20), randomAlphanumericOfLength(20));
}
/**
 * Produces a response guaranteed to differ from {@code instance}: either removes a
 * random entry from its per-data-stream results (appending one instead when the list is
 * empty, so the mutation still changes the instance) or appends a freshly generated
 * entry.
 */
@Override
protected UpdateDataStreamSettingsAction.Response mutateInstance(UpdateDataStreamSettingsAction.Response instance) throws IOException {
    List<UpdateDataStreamSettingsAction.DataStreamSettingsResponse> original = instance.getDataStreamSettingsResponses();
    List<UpdateDataStreamSettingsAction.DataStreamSettingsResponse> mutated = new ArrayList<>(original);
    // between(0, 1) yields exactly 0 or 1, so these two branches are exhaustive.
    if (between(0, 1) == 0) {
        if (original.isEmpty()) {
            mutated.add(randomDataStreamSettingsResponse());
        } else {
            mutated.remove(randomInt(original.size() - 1));
        }
    } else {
        mutated.add(randomDataStreamSettingsResponse());
    }
    return new UpdateDataStreamSettingsAction.Response(mutated);
}
}
| UpdateDataStreamSettingsActionResponseTests |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/PreparedStatementSetter.java | {
"start": 1483,
"end": 1763
} | class ____ catch and handle SQLExceptions appropriately.
*
* @author Rod Johnson
* @since March 2, 2003
* @see JdbcTemplate#update(String, PreparedStatementSetter)
* @see JdbcTemplate#query(String, PreparedStatementSetter, ResultSetExtractor)
*/
@FunctionalInterface
public | will |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/schemagen/SchemaScriptFileGenerationFailureTest.java | {
"start": 4481,
"end": 4773
} | class ____ extends Writer {
@Override
public void write(char[] cbuf, int off, int len) throws IOException {
}
@Override
public void flush() throws IOException {
throw new IOException( "Expected" );
}
@Override
public void close() throws IOException {
}
}
}
| FailingWriter |
java | alibaba__nacos | lock/src/main/java/com/alibaba/nacos/lock/constant/Constants.java | {
"start": 767,
"end": 878
} | class ____ {
public static final String LOCK_ACQUIRE_SERVICE_GROUP_V2 = "lock_acquire_service_v2";
}
| Constants |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.