language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
|
{
"start": 1691,
"end": 5278
}
|
class ____.
* @param conf
* @return ImpersonationProvider
*/
private static ImpersonationProvider getInstance(Configuration conf) {
Class<? extends ImpersonationProvider> clazz =
conf.getClass(
CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
DefaultImpersonationProvider.class, ImpersonationProvider.class);
return ReflectionUtils.newInstance(clazz, conf);
}
/**
* refresh Impersonation rules
*/
public static void refreshSuperUserGroupsConfiguration() {
//load server side configuration;
refreshSuperUserGroupsConfiguration(new Configuration());
}
/**
* Refreshes configuration using the specified Proxy user prefix for
* properties.
*
* @param conf configuration
* @param proxyUserPrefix proxy user configuration prefix
*/
public static void refreshSuperUserGroupsConfiguration(Configuration conf,
String proxyUserPrefix) {
Preconditions.checkArgument(proxyUserPrefix != null &&
!proxyUserPrefix.isEmpty(), "prefix cannot be NULL or empty");
// sip is volatile. Any assignment to it as well as the object's state
// will be visible to all the other threads.
ImpersonationProvider ip = getInstance(conf);
ip.init(proxyUserPrefix);
sip = ip;
ProxyServers.refresh(conf);
}
/**
* Refreshes configuration using the default Proxy user prefix for properties.
* @param conf configuration
*/
public static void refreshSuperUserGroupsConfiguration(Configuration conf) {
refreshSuperUserGroupsConfiguration(conf, CONF_HADOOP_PROXYUSER);
}
/**
* Authorize the superuser which is doing doAs.
* {@link #authorize(UserGroupInformation, InetAddress)} should be preferred
* to avoid possibly re-resolving the ip address.
*
* @param user ugi of the effective or proxy user which contains a real user
* @param remoteAddress the ip address of client
* @throws AuthorizationException Authorization Exception.
*/
public static void authorize(UserGroupInformation user,
String remoteAddress) throws AuthorizationException {
getSip().authorize(user, remoteAddress);
}
/**
* Authorize the superuser which is doing doAs.
*
* @param user ugi of the effective or proxy user which contains a real user
* @param remoteAddress the inet address of client
* @throws AuthorizationException Authorization Exception.
*/
public static void authorize(UserGroupInformation user,
InetAddress remoteAddress) throws AuthorizationException {
getSip().authorize(user, remoteAddress);
}
private static ImpersonationProvider getSip() {
if (sip == null) {
// In a race situation, It is possible for multiple threads to satisfy
// this condition.
// The last assignment will prevail.
refreshSuperUserGroupsConfiguration();
}
return sip;
}
/**
* This function is kept to provide backward compatibility.
* @param user user.
* @param remoteAddress remote address.
* @param conf configuration.
* @throws AuthorizationException Authorization Exception.
* @deprecated use {@link #authorize(UserGroupInformation, String)} instead.
*/
@Deprecated
public static void authorize(UserGroupInformation user,
String remoteAddress, Configuration conf) throws AuthorizationException {
authorize(user, remoteAddress);
}
@VisibleForTesting
public static DefaultImpersonationProvider getDefaultImpersonationProvider() {
return ((DefaultImpersonationProvider) getSip());
}
}
|
specified
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/JavaBeanBinderTests.java
|
{
"start": 31161,
"end": 31388
}
|
class ____ extends ExampleSuperClassBean {
private long longValue;
long getLongValue() {
return this.longValue;
}
void setLongValue(long longValue) {
this.longValue = longValue;
}
}
static
|
ExampleSubclassBean
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
|
{
"start": 25481,
"end": 26274
}
|
class ____
extends RichParallelSourceFunction<Integer> implements CheckpointListener {
private volatile boolean running = true;
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
while (running) {
synchronized (ctx.getCheckpointLock()) {
ctx.collect(getRuntimeContext().getTaskInfo().getIndexOfThisSubtask());
Thread.sleep(5L);
}
}
}
@Override
public void cancel() {
running = false;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
throw new RuntimeException("Test exception.");
}
}
}
|
FailOnCompletedCheckpointSource
|
java
|
processing__processing4
|
java/src/processing/mode/java/Compiler.java
|
{
"start": 3722,
"end": 7156
}
|
class ____
// so that it can grab the compiler JAR files from it.
ClassLoader loader = build.mode.getClassLoader();
try {
Class<?> batchClass =
Class.forName("org.eclipse.jdt.core.compiler.batch.BatchCompiler", false, loader);
Class<?> progressClass =
Class.forName("org.eclipse.jdt.core.compiler.CompilationProgress", false, loader);
Class<?>[] compileArgs =
new Class<?>[] { String[].class, PrintWriter.class, PrintWriter.class, progressClass };
Method compileMethod = batchClass.getMethod("compile", compileArgs);
success = (Boolean)
compileMethod.invoke(null, new Object[] { command, outWriter, writer, null });
} catch (Exception e) {
e.printStackTrace();
throw new SketchException("Unknown error inside the compiler.");
}
// Close out the stream for good measure
writer.flush();
writer.close();
BufferedReader reader =
new BufferedReader(new StringReader(errorBuffer.toString()));
//System.err.println(errorBuffer.toString());
String line;
while ((line = reader.readLine()) != null) {
//System.out.println("got line " + line); // debug
// get first line, which contains file name, line number,
// and at least the first line of the error message
String errorFormat = "([\\w\\d_]+\\.java):(\\d+):\\s*([^:]*):\\s*(.*)\\s*";
String[] pieces = PApplet.match(line, errorFormat);
//PApplet.println(pieces);
// if it's something unexpected, die and print the mess to the console
if (pieces == null) {
exception = new SketchException("Cannot parse error text: " + line);
exception.hideStackTrace();
// Send out the rest of the error message to the console.
System.err.println(line);
while ((line = reader.readLine()) != null) {
System.err.println(line);
}
break;
}
// translate the java filename and line number into a un-preprocessed
// location inside a source file or tab in the environment.
String dotJavaFilename = pieces[1];
// Line numbers are 1-indexed from javac
int dotJavaLineIndex = PApplet.parseInt(pieces[2]) - 1;
String errorMessage = pieces[4];
exception = build.placeException(errorMessage,
dotJavaFilename,
dotJavaLineIndex);
if (exception == null) {
exception = new SketchException(errorMessage);
}
String[] parts;
if (errorMessage.startsWith("The import ") &&
errorMessage.endsWith("cannot be resolved")) {
// The import poo cannot be resolved
//import poo.shoe.blah.*;
//String what = errorMessage.substring("The import ".length());
String[] m = PApplet.match(errorMessage, "The import (.*) cannot be resolved");
//what = what.substring(0, what.indexOf(' '));
if (m != null) {
// System.out.println("'" + m[1] + "'");
if (m[1].equals("processing.xml")) {
exception.setMessage("processing.xml no longer exists, this code needs to be updated for 2.0.");
System.err.println("The processing.xml library has been replaced " +
"with a new 'XML'
|
loader
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/ConcatFilesTaskTests.java
|
{
"start": 812,
"end": 2951
}
|
class ____ {
@Test
public void testHeaderAdded() throws IOException {
Project project = createProject();
ConcatFilesTask concatFilesTask = createTask(project);
concatFilesTask.setHeaderLine("Header");
File file = new File(project.getProjectDir(), "src/main/java/Code.java");
file.getParentFile().mkdirs();
file.createNewFile();
concatFilesTask.setTarget(file);
concatFilesTask.setFiles(project.fileTree("tmp/"));
concatFilesTask.concatFiles();
assertEquals(Arrays.asList("Header"), Files.readAllLines(concatFilesTask.getTarget().toPath(), StandardCharsets.UTF_8));
file.delete();
}
@Test
public void testConcatenationWithUnique() throws IOException {
Project project = createProject();
ConcatFilesTask concatFilesTask = createTask(project);
File file = new File(project.getProjectDir(), "src/main/java/Code.java");
file.getParentFile().mkdirs();
file.createNewFile();
concatFilesTask.setTarget(file);
File file1 = new File(project.getProjectDir(), "src/main/input/java/file1.java");
File file2 = new File(project.getProjectDir(), "src/main/input/text/file2.txt");
file1.getParentFile().mkdirs();
file2.getParentFile().mkdirs();
file1.createNewFile();
file2.createNewFile();
Files.writeString(file1.toPath(), "Hello" + System.lineSeparator() + "Hello");
Files.writeString(file2.toPath(), "Hello" + System.lineSeparator() + "नमस्ते");
concatFilesTask.setFiles(project.fileTree(file1.getParentFile().getParentFile()));
concatFilesTask.concatFiles();
assertEquals(Arrays.asList("Hello", "नमस्ते"), Files.readAllLines(concatFilesTask.getTarget().toPath(), StandardCharsets.UTF_8));
}
private Project createProject() {
Project project = ProjectBuilder.builder().build();
return project;
}
private ConcatFilesTask createTask(Project project) {
return project.getTasks().create("concatFilesTask", ConcatFilesTask.class);
}
}
|
ConcatFilesTaskTests
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/session/ForceEagerSessionCreationFilterTests.java
|
{
"start": 947,
"end": 1418
}
|
class ____ {
@Test
void createsSession() throws Exception {
ForceEagerSessionCreationFilter filter = new ForceEagerSessionCreationFilter();
MockHttpServletRequest request = new MockHttpServletRequest();
MockFilterChain chain = new MockFilterChain();
filter.doFilter(request, new MockHttpServletResponse(), chain);
assertThat(request.getSession(false)).isNotNull();
assertThat(chain.getRequest()).isEqualTo(request);
}
}
|
ForceEagerSessionCreationFilterTests
|
java
|
apache__flink
|
flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/ParquetInputFile.java
|
{
"start": 1853,
"end": 2393
}
|
class ____ extends DelegatingSeekableInputStream {
private final FSDataInputStream inputStream;
private FSDataInputStreamAdapter(FSDataInputStream inputStream) {
super(inputStream);
this.inputStream = inputStream;
}
@Override
public long getPos() throws IOException {
return inputStream.getPos();
}
@Override
public void seek(long newPos) throws IOException {
inputStream.seek(newPos);
}
}
}
|
FSDataInputStreamAdapter
|
java
|
netty__netty
|
codec-http2/src/test/java/io/netty/handler/codec/http2/AbstractWeightedFairQueueByteDistributorDependencyTest.java
|
{
"start": 973,
"end": 3010
}
|
class ____ {
Http2Connection connection;
WeightedFairQueueByteDistributor distributor;
private final IntObjectMap<TestStreamByteDistributorStreamState> stateMap =
new IntObjectHashMap<TestStreamByteDistributorStreamState>();
@Mock
StreamByteDistributor.Writer writer;
Http2Stream stream(int streamId) {
return connection.stream(streamId);
}
Answer<Void> writeAnswer(final boolean closeIfNoFrame) {
return new Answer<Void>() {
@Override
public Void answer(InvocationOnMock in) throws Throwable {
Http2Stream stream = in.getArgument(0);
int numBytes = in.getArgument(1);
TestStreamByteDistributorStreamState state = stateMap.get(stream.id());
state.pendingBytes -= numBytes;
state.hasFrame = state.pendingBytes > 0;
state.isWriteAllowed = state.hasFrame;
if (closeIfNoFrame && !state.hasFrame) {
stream.close();
}
distributor.updateStreamableBytes(state);
return null;
}
};
}
void initState(final int streamId, final long streamableBytes, final boolean hasFrame) {
initState(streamId, streamableBytes, hasFrame, hasFrame);
}
void initState(final int streamId, final long pendingBytes, final boolean hasFrame,
final boolean isWriteAllowed) {
final Http2Stream stream = stream(streamId);
TestStreamByteDistributorStreamState state = new TestStreamByteDistributorStreamState(stream, pendingBytes,
hasFrame, isWriteAllowed);
stateMap.put(streamId, state);
distributor.updateStreamableBytes(state);
}
void setPriority(int streamId, int parent, int weight, boolean exclusive) throws Http2Exception {
distributor.updateDependencyTree(streamId, parent, (short) weight, exclusive);
}
}
|
AbstractWeightedFairQueueByteDistributorDependencyTest
|
java
|
playframework__playframework
|
core/play/src/main/java/play/controllers/AssetsComponents.java
|
{
"start": 461,
"end": 1303
}
|
interface ____
extends ConfigurationComponents, HttpErrorHandlerComponents, FileMimeTypesComponents {
Environment environment();
ApplicationLifecycle applicationLifecycle();
default AssetsConfiguration assetsConfiguration() {
return AssetsConfiguration$.MODULE$.fromConfiguration(
configuration(), environment().asScala().mode());
}
default AssetsMetadata assetsMetadata() {
return new AssetsMetadataProvider(
environment().asScala(),
assetsConfiguration(),
fileMimeTypes().asScala(),
applicationLifecycle().asScala())
.get();
}
default AssetsFinder assetsFinder() {
return assetsMetadata().finder();
}
default Assets assets() {
return new Assets(scalaHttpErrorHandler(), assetsMetadata(), environment().asScala());
}
}
|
AssetsComponents
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemafilter/SchemaFilterTest.java
|
{
"start": 6169,
"end": 6420
}
|
class ____ {
@Id
private long id;
public long getId() {
return id;
}
public void setId( long id ) {
this.id = id;
}
}
@Entity
@jakarta.persistence.Table(name = "the_entity_3", schema = "the_schema_2")
public static
|
Schema1Entity2
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/TopologyDescription.java
|
{
"start": 4953,
"end": 5466
}
|
interface ____ extends Node {
/**
* The topic names this source node is reading from.
* @return a set of topic names
*/
@SuppressWarnings("unused")
Set<String> topicSet();
/**
* The pattern used to match topic names that is reading from.
* @return the pattern used to match topic names
*/
@SuppressWarnings("unused")
Pattern topicPattern();
}
/**
* A processor node of a topology.
*/
|
Source
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java
|
{
"start": 7361,
"end": 7537
}
|
class ____ extending {@link ComponentClassLoader} which overwrites method{@link #addURL}
* to enable it can add url to component classloader.
*/
private static
|
loader
|
java
|
quarkusio__quarkus
|
extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/NodeSelectorConfig.java
|
{
"start": 50,
"end": 228
}
|
interface ____ {
/**
* The key of the nodeSelector.
*/
String key();
/**
* The value of the nodeSelector.
*/
String value();
}
|
NodeSelectorConfig
|
java
|
apache__camel
|
components/camel-resourceresolver-github/src/main/java/org/apache/camel/github/GitHubResource.java
|
{
"start": 1074,
"end": 2043
}
|
class ____ extends ResourceSupport {
private final CamelContext camelContext;
private byte[] cache;
private boolean init;
public GitHubResource(CamelContext camelContext, String location) {
super("github", location);
this.camelContext = camelContext;
}
@Override
public boolean exists() {
if (!init) {
try {
URL u = URI.create(getLocation()).toURL();
try (InputStream is = u.openStream()) {
cache = camelContext.getTypeConverter().tryConvertTo(byte[].class, is);
}
} catch (Exception e) {
// ignore
}
init = true;
}
return cache != null;
}
@Override
public InputStream getInputStream() throws IOException {
if (exists()) {
return new ByteArrayInputStream(cache);
} else {
return null;
}
}
}
|
GitHubResource
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchPhraseFunctionIT.java
|
{
"start": 925,
"end": 12204
}
|
class ____ extends AbstractEsqlIntegTestCase {
@Before
public void setupIndex() {
createAndPopulateIndex(this::ensureYellow);
}
public void testSimpleWhereMatchPhrase() {
var query = """
FROM test
| WHERE match_phrase(content, "brown fox")
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), List.of(List.of(1), List.of(6)));
}
}
public void testSimpleWhereMatchPhraseNoResults() {
var query = """
FROM test
| WHERE match_phrase(content, "fox brown")
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), Collections.emptyList());
}
}
public void testSimpleWhereMatchPhraseAndSlop() {
var query = """
FROM test
| WHERE match_phrase(content, "fox brown", {"slop": 5})
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), List.of(List.of(1), List.of(6)));
}
}
public void testCombinedWhereMatchPhrase() {
var query = """
FROM test
| WHERE match_phrase(content, "brown fox") AND id > 5
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), List.of(List.of(6)));
}
}
public void testMultipleMatchPhrase() {
var query = """
FROM test
| WHERE match_phrase(content, "the quick") AND match_phrase(content, "brown fox")
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), List.of(List.of(6)));
}
}
public void testMultipleWhereMatchPhrase() {
var query = """
FROM test
| WHERE match_phrase(content, "the quick") AND match_phrase(content, "brown fox")
| EVAL summary = CONCAT("document with id: ", to_str(id), "and content: ", content)
| SORT summary
| LIMIT 4
| WHERE match_phrase(content, "lazy dog")
| KEEP id
""";
var error = expectThrows(ElasticsearchException.class, () -> run(query));
assertThat(error.getMessage(), containsString("[MatchPhrase] function cannot be used after LIMIT"));
}
public void testNotWhereMatchPhrase() {
var query = """
FROM test
| WHERE NOT match_phrase(content, "brown fox")
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), List.of(List.of(2), List.of(3), List.of(4), List.of(5)));
}
}
public void testWhereMatchPhraseWithScoring() {
var query = """
FROM test
METADATA _score
| WHERE match_phrase(content, "brown fox")
| KEEP id, _score
| SORT id ASC
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id", "_score"));
assertColumnTypes(resp.columns(), List.of("integer", "double"));
assertValues(resp.values(), List.of(List.of(1, 1.4274532794952393), List.of(6, 1.1248723268508911)));
}
}
public void testWhereMatchPhraseWithScoringDifferentSort() {
var query = """
FROM test
METADATA _score
| WHERE match_phrase(content, "brown fox")
| KEEP id, _score
| SORT id DESC
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id", "_score"));
assertColumnTypes(resp.columns(), List.of("integer", "double"));
assertValues(resp.values(), List.of(List.of(6, 1.1248723268508911), List.of(1, 1.4274532794952393)));
}
}
public void testWhereMatchPhraseWithScoringSortScore() {
var query = """
FROM test
METADATA _score
| WHERE match_phrase(content, "brown fox")
| KEEP id, _score
| SORT _score DESC
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id", "_score"));
assertColumnTypes(resp.columns(), List.of("integer", "double"));
assertValues(resp.values(), List.of(List.of(1, 1.4274532794952393), List.of(6, 1.1248723268508911)));
}
}
public void testWhereMatchPhraseWithScoringNoSort() {
var query = """
FROM test
METADATA _score
| WHERE match_phrase(content, "brown fox")
| KEEP id, _score
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id", "_score"));
assertColumnTypes(resp.columns(), List.of("integer", "double"));
assertValuesInAnyOrder(resp.values(), List.of(List.of(1, 1.4274532794952393), List.of(6, 1.1248723268508911)));
}
}
public void testNonExistingColumn() {
var query = """
FROM test
| WHERE match_phrase(something, "brown fox")
""";
var error = expectThrows(VerificationException.class, () -> run(query));
assertThat(error.getMessage(), containsString("Unknown column [something]"));
}
public void testWhereMatchPhraseEvalColumn() {
var query = """
FROM test
| EVAL upper_content = to_upper(content)
| WHERE match_phrase(upper_content, "BROWN FOX")
| KEEP id
""";
var error = expectThrows(VerificationException.class, () -> run(query));
assertThat(
error.getMessage(),
containsString("[MatchPhrase] function cannot operate on [upper_content], which is not a field from an index mapping")
);
}
public void testWhereMatchPhraseOverWrittenColumn() {
var query = """
FROM test
| DROP content
| EVAL content = CONCAT("document with ID ", to_str(id))
| WHERE match_phrase(content, "document content")
""";
var error = expectThrows(VerificationException.class, () -> run(query));
assertThat(
error.getMessage(),
containsString("[MatchPhrase] function cannot operate on [content], which is not a field from an index mapping")
);
}
public void testWhereMatchPhraseAfterStats() {
var query = """
FROM test
| STATS count(*)
| WHERE match_phrase(content, "brown fox")
""";
var error = expectThrows(VerificationException.class, () -> run(query));
assertThat(error.getMessage(), containsString("Unknown column [content]"));
}
public void testWhereMatchPhraseNotPushedDown() {
var query = """
FROM test
| WHERE match_phrase(content, "brown fox") OR length(content) < 20
| KEEP id
| SORT id
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("id"));
assertColumnTypes(resp.columns(), List.of("integer"));
assertValues(resp.values(), List.of(List.of(1), List.of(2), List.of(6)));
}
}
public void testWhereMatchPhraseWithRow() {
var query = """
ROW content = "a brown fox"
| WHERE match_phrase(content, "brown fox")
""";
var error = expectThrows(ElasticsearchException.class, () -> run(query));
assertThat(
error.getMessage(),
containsString("line 2:22: [MatchPhrase] function cannot operate on [content], which is not a field from an index mapping")
);
}
public void testMatchPhraseWithStats() {
var errorQuery = """
FROM test
| STATS c = count(*) BY match_phrase(content, "brown fox")
""";
var error = expectThrows(ElasticsearchException.class, () -> run(errorQuery));
assertThat(error.getMessage(), containsString("[MatchPhrase] function is only supported in WHERE and STATS commands"));
var query = """
FROM test
| STATS c = count(*) WHERE match_phrase(content, "brown fox"), d = count(*) WHERE match_phrase(content, "lazy dog")
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("c", "d"));
assertColumnTypes(resp.columns(), List.of("long", "long"));
assertValues(resp.values(), List.of(List.of(2L, 1L)));
}
query = """
FROM test METADATA _score
| WHERE match_phrase(content, "brown fox")
| STATS m = max(_score), n = min(_score)
""";
try (var resp = run(query)) {
assertColumnNames(resp.columns(), List.of("m", "n"));
assertColumnTypes(resp.columns(), List.of("double", "double"));
List<List<Object>> valuesList = getValuesList(resp.values());
assertEquals(1, valuesList.size());
assertThat((double) valuesList.get(0).get(0), Matchers.greaterThan(1.0));
assertThat((double) valuesList.get(0).get(1), Matchers.greaterThan(0.0));
}
}
public void testMatchPhraseWithinEval() {
var query = """
FROM test
| EVAL matches_query = match_phrase(content, "brown fox")
""";
var error = expectThrows(VerificationException.class, () -> run(query));
assertThat(error.getMessage(), containsString("[MatchPhrase] function is only supported in WHERE and STATS commands"));
}
public void testMatchPhraseWithLookupJoin() {
var query = """
FROM test
| LOOKUP JOIN test_lookup ON id
| WHERE id > 0 AND MATCH_PHRASE(lookup_content, "fox")
""";
var error = expectThrows(VerificationException.class, () -> run(query));
assertThat(
error.getMessage(),
containsString(
"line 3:33: [MatchPhrase] function cannot operate on [lookup_content], supplied by an index [test_lookup] "
+ "in non-STANDARD mode [lookup]"
)
);
}
}
|
MatchPhraseFunctionIT
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/dataview/StateMapView.java
|
{
"start": 11416,
"end": 12249
}
|
class ____<N, EK, EV>
extends StateMapViewWithKeysNotNull<N, EK, EV> {
private final InternalMapState<?, N, EK, EV> internalMapState;
private N namespace;
public NamespacedStateMapViewWithKeysNotNull(
InternalMapState<?, N, EK, EV> internalMapState) {
this.internalMapState = internalMapState;
}
@Override
public void setCurrentNamespace(N namespace) {
this.namespace = namespace;
}
@Override
protected MapState<EK, EV> getMapState() {
internalMapState.setCurrentNamespace(namespace);
return internalMapState;
}
}
/** A state {@link MapView} which supports nullable keys but does not support namespace. */
public static final
|
NamespacedStateMapViewWithKeysNotNull
|
java
|
quarkusio__quarkus
|
extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoConfig.java
|
{
"start": 633,
"end": 4014
}
|
interface ____ {
String CONFIG_NAME = "mongodb";
String DEFAULT_CLIENT_NAME = "<default>";
/**
* Configures the Mongo clients.
* <p>
* The default client does not have a name, and it is configured as:
*
* <pre>
* quarkus.mongodb.connection-string = mongodb://mongo1:27017
* </pre>
*
* And then use {@link jakarta.inject.Inject} to inject the client:
*
* <pre>
* @Inject
* MongoClient mongoClient;
* </pre>
*
* <p>
* Named clusters must be identified to select the right client:
*
* <pre>
* quarkus.mongodb.cluster1.connection-string = mongodb://mongo1:27017
* quarkus.mongodb.cluster2.connection-string = mongodb://mongo2:27017,mongodb://mongo3:27017
* </pre>
*
* And then use the {@link io.quarkus.mongodb.MongoClientName} annotation to select any of the beans:
* <ul>
* <li>{@link com.mongodb.client.MongoClient}</li>
* <li>{@link io.quarkus.mongodb.reactive.ReactiveMongoClient}</li>
* </ul>
* And inject the client:
*
* <pre>
* {@code
* @MongoClientName("cluster1")
* @Inject
* ReactiveMongoClient mongoClientCluster1
* }
* </pre>
*/
@WithParentName
@WithDefaults
@WithUnnamedKey(DEFAULT_CLIENT_NAME)
@ConfigDocMapKey("mongo-client-name")
Map<String, MongoClientConfig> clients();
/**
* The default DNS resolver used to handle {@code mongo+srv://} urls cannot be used in a native executable.
* This option enables a fallback to use Vert.x to resolve the server names instead of JNDI.
*
* <strong>IMPORTANT:</strong> The resolution may be different in JVM mode using the default (JNDI-based) DNS resolver,
* and in native mode. This feature is experimental.
*
* @deprecated This resolver is always used
*/
@Deprecated
@WithName("native.dns.use-vertx-dns-resolver")
@WithDefault("false")
boolean useVertxDnsResolverInNativeMode();
/**
* This property configures the DNS server. If the server is not set, it tries to read the first {@code nameserver} from
* {@code /etc /resolv.conf} (if the file exists), otherwise fallback to the default.
*/
@WithName("dns.server-host")
Optional<String> dnsServer();
/**
* This property configures the DNS server port.
*/
@WithName("dns.server-port")
OptionalInt dnsServerPort();
/**
* If {@code native.dns.use-vertx-dns-resolver} is set to {@code true}, this property configures the DNS lookup timeout
* duration.
*/
@WithName("dns.lookup-timeout")
@WithDefault("5s")
Duration dnsLookupTimeout();
/**
* This property enables the logging ot the DNS lookup. It can be useful to understand why the lookup fails.
*/
@WithDefault("false")
@WithName("dns.log-activity")
boolean dnsLookupLogActivity();
static boolean isDefaultClient(final String name) {
return DEFAULT_CLIENT_NAME.equalsIgnoreCase(name);
}
static String getPropertyName(final String name, final String attribute) {
String prefix = DEFAULT_CLIENT_NAME.equals(name)
? "quarkus.mongodb."
: "quarkus.mongodb." + (name.contains(".") ? "\"" + name + "\"" : name) + ".";
return prefix + attribute;
}
}
|
MongoConfig
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 1131894,
"end": 1131998
}
|
class ____ use when unmarshalling", displayName = "Instance Class")
}
)
public static
|
to
|
java
|
micronaut-projects__micronaut-core
|
http-client-core/src/main/java/io/micronaut/http/client/HttpClientConfiguration.java
|
{
"start": 41309,
"end": 41686
}
|
enum ____ {
/**
* The connection pool introduced in micronaut-core 4.0.0.
*/
V4_0,
/**
* The connection pool introduced in micronaut-core 4.9.0.
*/
V4_9
}
}
/**
* Configuration for WebSocket client compression extensions.
*/
public static
|
PoolVersion
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/component/AbstractApiConsumer.java
|
{
"start": 3110,
"end": 3870
}
|
class ____ call super.doInvokeMethod() to invoke the API method.
*
* @param args method arguments from endpoint parameters.
* @return method invocation result.
*/
protected Object doInvokeMethod(Map<String, Object> args) {
return ApiMethodHelper.invokeMethod(endpoint.getApiProxy(method, args), method, args);
}
@Override
public Object splitResult(Object result) {
return result;
}
@Override
public void interceptResult(Object result, Exchange resultExchange) {
// do nothing by default
}
public final boolean isSplitResult() {
return splitResult;
}
public final void setSplitResult(boolean splitResult) {
this.splitResult = splitResult;
}
}
|
MUST
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserMlNodeTaskSettings.java
|
{
"start": 704,
"end": 2114
}
|
class ____ implements TaskSettings {
public static final String NAME = "elser_mlnode_task_settings";
public static final ElserMlNodeTaskSettings DEFAULT = new ElserMlNodeTaskSettings();
public ElserMlNodeTaskSettings() {}
public ElserMlNodeTaskSettings(StreamInput in) {}
@Override
public boolean isEmpty() {
return true;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.V_8_11_X;
}
@Override
public void writeTo(StreamOutput out) throws IOException {}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null || getClass() != obj.getClass()) return false;
return true;
}
@Override
public int hashCode() {
// TODO Class has no members all instances are equivalent
// Return the hash of NAME to make the serialization tests pass
return Objects.hash(NAME);
}
@Override
public TaskSettings updatedTaskSettings(Map<String, Object> newSettings) {
return DEFAULT;
}
}
|
ElserMlNodeTaskSettings
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jetty/src/main/java/org/springframework/boot/jetty/JettyHandlerWrappers.java
|
{
"start": 1375,
"end": 2366
}
|
class ____ {
private JettyHandlerWrappers() {
}
static Handler.Wrapper createGzipHandlerWrapper(Compression compression) {
CompressionHandler compressionHandler = new CompressionHandler();
GzipCompression gzip = new GzipCompression();
gzip.setMinCompressSize((int) compression.getMinResponseSize().toBytes());
compressionHandler.putCompression(gzip);
Builder configBuilder = CompressionConfig.builder();
for (String mimeType : compression.getMimeTypes()) {
configBuilder.compressIncludeMimeType(mimeType);
}
for (HttpMethod httpMethod : HttpMethod.values()) {
configBuilder.compressIncludeMethod(httpMethod.name());
}
compressionHandler.putConfiguration(PathSpec.from("/"), configBuilder.build());
return compressionHandler;
}
static Handler.Wrapper createServerHeaderHandlerWrapper(String header) {
return new ServerHeaderHandler(header);
}
/**
* {@link Handler.Wrapper} to add a custom {@code server} header.
*/
private static
|
JettyHandlerWrappers
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/ConfigEntry.java
|
{
"start": 6685,
"end": 7874
}
|
enum ____ {
DYNAMIC_TOPIC_CONFIG, // dynamic topic config that is configured for a specific topic
DYNAMIC_BROKER_LOGGER_CONFIG, // dynamic broker logger config that is configured for a specific broker
DYNAMIC_BROKER_CONFIG, // dynamic broker config that is configured for a specific broker
DYNAMIC_DEFAULT_BROKER_CONFIG, // dynamic broker config that is configured as default for all brokers in the cluster
DYNAMIC_CLIENT_METRICS_CONFIG, // dynamic client metrics subscription config that is configured for all clients
DYNAMIC_GROUP_CONFIG, // dynamic group config that is configured for a specific group
STATIC_BROKER_CONFIG, // static broker config provided as broker properties at start up (e.g. server.properties file)
DEFAULT_CONFIG, // built-in default configuration for configs that have a default value
UNKNOWN // source unknown e.g. in the ConfigEntry used for alter requests where source is not set
}
/**
* Class representing a configuration synonym of a {@link ConfigEntry}.
*/
public static
|
ConfigSource
|
java
|
processing__processing4
|
java/src/processing/mode/java/preproc/SourceUtil.java
|
{
"start": 491,
"end": 11829
}
|
class ____ {
// No longer needed with use of ANTLR in the preprocessor service.
private static final boolean PERFORM_SOURCE_UTIL_TRANSFORMS = false;
public static final Pattern IMPORT_REGEX =
Pattern.compile("(?:^|;)\\s*(import\\s+(?:(static)\\s+)?((?:\\w+\\s*\\.)*)\\s*(\\S+)\\s*;)",
Pattern.MULTILINE | Pattern.DOTALL);
public static final Pattern IMPORT_REGEX_NO_KEYWORD =
Pattern.compile("^\\s*((?:(static)\\s+)?((?:\\w+\\s*\\.)*)\\s*(\\S+))",
Pattern.MULTILINE | Pattern.DOTALL);
public static List<ImportStatement> parseProgramImports(CharSequence source) {
List<ImportStatement> result = new ArrayList<>();
Matcher matcher = IMPORT_REGEX.matcher(source);
while (matcher.find()) {
ImportStatement is = ImportStatement.parse(matcher.toMatchResult());
result.add(is);
}
return result;
}
/*
public static List<Edit> parseProgramImports(CharSequence source,
List<ImportStatement> outImports) {
List<Edit> result = new ArrayList<>();
Matcher matcher = IMPORT_REGEX.matcher(source);
while (matcher.find()) {
ImportStatement is = ImportStatement.parse(matcher.toMatchResult());
outImports.add(is);
int idx = matcher.start(1);
int len = matcher.end(1) - idx;
// Remove the import from the main program
// Substitute with white spaces
result.add(Edit.move(idx, len, 0));
result.add(Edit.insert(0, "\n"));
}
return result;
}
*/
// Positive lookahead and lookbehind are needed to match all type constructors
// in code like `int(byte(245))` where first bracket matches as last
// group in "^int(" but also as a first group in "(byte(". Lookahead and
// lookbehind won't consume the shared character.
public static final Pattern TYPE_CONSTRUCTOR_REGEX =
Pattern.compile("(?<=^|\\W)(int|char|float|boolean|byte)(?=\\s*\\()",
Pattern.MULTILINE);
public static List<Edit> replaceTypeConstructors(CharSequence source) {
List<Edit> result = new ArrayList<>();
Matcher matcher = TYPE_CONSTRUCTOR_REGEX.matcher(source);
while (matcher.find()) {
String match = matcher.group(1);
int offset = matcher.start(1);
int length = match.length();
result.add(Edit.insert(offset, "PApplet."));
String replace = "parse"
+ Character.toUpperCase(match.charAt(0)) + match.substring(1);
result.add(Edit.replace(offset, length, replace));
}
return result;
}
public static final Pattern HEX_LITERAL_REGEX =
Pattern.compile("(?<=^|\\W)(#[A-Fa-f0-9]{6})(?=\\W|$)");
public static List<Edit> replaceHexLiterals(CharSequence source) {
// Find all #[webcolor] and replace with 0xff[webcolor]
// Should be 6 digits only.
List<Edit> result = new ArrayList<>();
Matcher matcher = HEX_LITERAL_REGEX.matcher(source);
while (matcher.find()) {
int offset = matcher.start(1);
result.add(Edit.replace(offset, 1, "0xff"));
}
return result;
}
// Verifies that whole input String is floating point literal. Can't be used for searching.
// https://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-DecimalFloatingPointLiteral
public static final Pattern FLOATING_POINT_LITERAL_VERIFIER;
static {
// TODO lots of "Unnecessary non-capturing group" sequences here,
// but not touching until someone can look more closely.
final String DIGITS = "(?:[0-9]|[0-9][0-9_]*[0-9])";
final String EXPONENT_PART = "(?:[eE][+-]?" + DIGITS + ")";
FLOATING_POINT_LITERAL_VERIFIER = Pattern.compile(
"(?:^" + DIGITS + "\\." + DIGITS + "?" + EXPONENT_PART + "?[fFdD]?$)|" +
"(?:^\\." + DIGITS + EXPONENT_PART + "?[fFdD]?$)|" +
"(?:^" + DIGITS + EXPONENT_PART + "[fFdD]?$)|" +
"(?:^" + DIGITS + EXPONENT_PART + "?[fFdD]$)");
}
// Mask to quickly resolve whether there are any access modifiers present
private static final int ACCESS_MODIFIERS_MASK =
Modifier.PUBLIC | Modifier.PRIVATE | Modifier.PROTECTED;
public static List<Edit> preprocessAST(CompilationUnit cu) {
if (!PERFORM_SOURCE_UTIL_TRANSFORMS) {
return new ArrayList<>();
}
final List<Edit> edits = new ArrayList<>();
// Walk the tree
cu.accept(new ASTVisitor() {
@Override
public boolean visit(SimpleType node) {
// replace "color" with "int"
if ("color".equals(node.getName().toString())) {
edits.add(Edit.replace(node.getStartPosition(), node.getLength(), "int"));
}
return super.visit(node);
}
@Override
public boolean visit(NumberLiteral node) {
// add 'f' to floats
String s = node.getToken().toLowerCase();
if (FLOATING_POINT_LITERAL_VERIFIER.matcher(s).matches() && !s.endsWith("f") && !s.endsWith("d")) {
edits.add(Edit.insert(node.getStartPosition() + node.getLength(), "f"));
}
return super.visit(node);
}
@Override
public boolean visit(MethodDeclaration node) {
// add 'public' to methods with default visibility
int accessModifiers = node.getModifiers() & ACCESS_MODIFIERS_MASK;
if (accessModifiers == 0) {
edits.add(Edit.insert(node.getStartPosition(), "public "));
}
return super.visit(node);
}
});
return edits;
}
public static final Pattern COLOR_TYPE_REGEX =
Pattern.compile("(?:^|^\\p{javaJavaIdentifierPart})(color)\\s(?!\\s*\\()",
Pattern.MULTILINE | Pattern.UNICODE_CHARACTER_CLASS);
public static List<Edit> replaceColorRegex(CharSequence source) {
final List<Edit> edits = new ArrayList<>();
Matcher matcher = COLOR_TYPE_REGEX.matcher(source);
while (matcher.find()) {
int offset = matcher.start(1);
edits.add(Edit.replace(offset, 5, "int"));
}
return edits;
}
public static final Pattern NUMBER_LITERAL_REGEX =
Pattern.compile("[-+]?[0-9]*\\.?[0-9]+(?:[eE][-+]?[0-9]+)?");
public static List<Edit> fixFloatsRegex(CharSequence source) {
final List<Edit> edits = new ArrayList<>();
Matcher matcher = NUMBER_LITERAL_REGEX.matcher(source);
while (matcher.find()) {
int offset = matcher.start();
int end = matcher.end();
String group = matcher.group().toLowerCase();
boolean isFloatingPoint = group.contains(".") || group.contains("e");
boolean hasSuffix = end < source.length() &&
Character.toLowerCase(source.charAt(end)) != 'f' &&
Character.toLowerCase(source.charAt(end)) != 'd';
if (isFloatingPoint && !hasSuffix) {
edits.add(Edit.insert(offset, "f"));
}
}
return edits;
}
/*
static public String scrubCommentsAndStrings(String p) {
StringBuilder sb = new StringBuilder(p);
scrubCommentsAndStrings(sb);
return sb.toString();
}
*/
static public void scrubCommentsAndStrings(StringBuilder p) {
if (!PERFORM_SOURCE_UTIL_TRANSFORMS) {
return;
}
final int length = p.length();
final int OUT = 0;
final int IN_BLOCK_COMMENT = 1;
final int IN_EOL_COMMENT = 2;
final int IN_STRING_LITERAL = 3;
final int IN_CHAR_LITERAL = 4;
int blockStart = -1;
int prevState = OUT;
int state = OUT;
for (int i = 0; i <= length; i++) {
char ch = (i < length) ? p.charAt(i) : 0;
char pch = (i == 0) ? 0 : p.charAt(i-1);
// Get rid of double backslash immediately, otherwise
// the second backslash incorrectly triggers a new escape sequence
if (pch == '\\' && ch == '\\') {
p.setCharAt(i-1, ' ');
p.setCharAt(i, ' ');
pch = ' ';
ch = ' ';
}
switch (state) {
case OUT:
switch (ch) {
case '\'': state = IN_CHAR_LITERAL; break;
case '"': state = IN_STRING_LITERAL; break;
case '*': if (pch == '/') state = IN_BLOCK_COMMENT; break;
case '/': if (pch == '/') state = IN_EOL_COMMENT; break;
}
break;
case IN_BLOCK_COMMENT:
if (pch == '*' && ch == '/' && (i - blockStart) > 0) {
state = OUT;
}
break;
case IN_EOL_COMMENT:
if (ch == '\r' || ch == '\n') {
state = OUT;
}
break;
case IN_STRING_LITERAL:
if ((pch != '\\' && ch == '"') || ch == '\r' || ch == '\n') {
state = OUT;
}
break;
case IN_CHAR_LITERAL:
if ((pch != '\\' && ch == '\'') || ch == '\r' || ch == '\n') {
state = OUT;
}
break;
}
// Terminate ongoing block at last char
if (i == length) {
state = OUT;
}
// Handle state changes
if (state != prevState) {
if (state != OUT) {
// Entering block
blockStart = i + 1;
} else {
// Exiting block
int blockEnd = i;
if (prevState == IN_BLOCK_COMMENT && i < length) blockEnd--; // preserve star in '*/'
for (int j = blockStart; j < blockEnd; j++) {
char c = p.charAt(j);
if (c != '\n' && c != '\r') p.setCharAt(j, ' ');
}
}
}
prevState = state;
}
}
// TODO: move this to a better place when JavaBuild starts using JDT and we
// don't need to check errors at two different places [jv 2017-09-19]
/**
* Checks a single code fragment (such as a tab) for non-matching braces.
* Broken out to allow easy use in JavaBuild.
* @param c Program code scrubbed of comments and string literals.
* @param start Start index, inclusive.
* @param end End index, exclusive.
* @return {@code int[4]} Depth at which the loop stopped, followed by the
* line number, column, and string index (within the range) at which
* an error was found, if any.
*/
static public int[] checkForMissingBraces(CharSequence c, int start, int end) {
int depth = 0;
int lineNumber = 0;
int lineStart = start;
for (int i = start; i < end; i++) {
char ch = c.charAt(i);
switch (ch) {
case '{':
depth++;
break;
case '}':
depth--;
break;
case '\n':
lineNumber++;
lineStart = i;
break;
}
if (depth < 0) {
return new int[] {depth, lineNumber, i - lineStart, i - start};
}
}
return new int[] {depth, lineNumber - 1, end - lineStart - 2, end - start - 2};
}
/**
* Determine how many times a string appears in another.
*
* @param body The string in which occurrences should be counted.
* @param search The string to look for.
* @return The number of times search appears in body.
*/
public static int getCount(String body, String search) {
int count = 0;
if (search.length() == 1) {
for (int i = 0; i < body.length(); i++) {
if (body.charAt(i) == search.charAt(0)) {
count++;
}
}
} else {
for (int i = 0; i < body.length(); i++) {
if (body.substring(i).startsWith(search)) {
count++;
}
}
}
return count;
}
}
|
SourceUtil
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/internal/CoreRealm.java
|
{
"start": 1439,
"end": 1567
}
|
class ____ in use.
*/
@Nonnull
default ClassWorld getClassWorld() {
return getRealm().getWorld();
}
}
|
world
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/Float2DArraysBaseTest.java
|
{
"start": 888,
"end": 1108
}
|
class ____ testing <code>{@link Float2DArrays}</code>.
* <p>
* Is in <code>org.assertj.core.internal</code> package to be able to set {@link Float2DArrays#failures} appropriately.
*
* @author Maciej Wajcht
*/
public
|
for
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/NMClientAsyncImpl.java
|
{
"start": 23367,
"end": 25224
}
|
class ____ implements
MultipleArcTransition<StatefulContainer, ContainerEvent,
ContainerState> {
@Override
public ContainerState transition(
StatefulContainer container, ContainerEvent event) {
ContainerId containerId = event.getContainerId();
try {
StartContainerEvent scEvent = null;
if (event instanceof StartContainerEvent) {
scEvent = (StartContainerEvent) event;
}
assert scEvent != null;
Map<String, ByteBuffer> allServiceResponse =
container.nmClientAsync.getClient().startContainer(
scEvent.getContainer(), scEvent.getContainerLaunchContext());
try {
container.nmClientAsync.getCallbackHandler().onContainerStarted(
containerId, allServiceResponse);
} catch (Throwable thr) {
// Don't process user created unchecked exception
LOG.info("Unchecked exception is thrown from onContainerStarted for "
+ "Container " + containerId, thr);
}
return ContainerState.RUNNING;
} catch (Throwable e) {
return onExceptionRaised(container, event, e);
}
}
private ContainerState onExceptionRaised(StatefulContainer container,
ContainerEvent event, Throwable t) {
try {
container.nmClientAsync.getCallbackHandler().onStartContainerError(
event.getContainerId(), t);
} catch (Throwable thr) {
// Don't process user created unchecked exception
LOG.info(
"Unchecked exception is thrown from onStartContainerError for " +
"Container " + event.getContainerId(), thr);
}
return ContainerState.FAILED;
}
}
protected static
|
StartContainerTransition
|
java
|
quarkusio__quarkus
|
extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/MethodLevelCustomPermissionsAllowedTest.java
|
{
"start": 4102,
"end": 5198
}
|
class ____ extends Permission {
private final Permission delegate;
public CustomPermission(String name, String... actions) {
super(name);
this.delegate = new StringPermission(name, actions);
}
@Override
public boolean implies(Permission permission) {
if (permission instanceof CustomPermission) {
return delegate.implies(((CustomPermission) permission).delegate);
}
return false;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
CustomPermission that = (CustomPermission) o;
return delegate.equals(that.delegate);
}
@Override
public int hashCode() {
return Objects.hash(delegate);
}
@Override
public String getActions() {
return delegate.getActions();
}
}
@Singleton
public static
|
CustomPermission
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java
|
{
"start": 1236,
"end": 7806
}
|
class ____ extends BroadcastRequest<IndicesStatsRequest> {
private CommonStatsFlags flags = new CommonStatsFlags();
public IndicesStatsRequest() {
super((String[]) null);
}
public IndicesStatsRequest(StreamInput in) throws IOException {
super(in);
flags = new CommonStatsFlags(in);
}
/**
* Sets all flags to return all stats.
*/
public IndicesStatsRequest all() {
flags.all();
return this;
}
/**
* Clears all stats.
*/
public IndicesStatsRequest clear() {
flags.clear();
return this;
}
/**
* Returns the underlying stats flags.
*/
public CommonStatsFlags flags() {
return flags;
}
/**
* Sets the underlying stats flags.
*/
public IndicesStatsRequest flags(CommonStatsFlags flags) {
this.flags = flags;
return this;
}
/**
* Sets specific search group stats to retrieve the stats for. Mainly affects search
* when enabled.
*/
public IndicesStatsRequest groups(String... groups) {
flags.groups(groups);
return this;
}
public String[] groups() {
return this.flags.groups();
}
public IndicesStatsRequest docs(boolean docs) {
flags.set(Flag.Docs, docs);
return this;
}
public boolean docs() {
return flags.isSet(Flag.Docs);
}
public IndicesStatsRequest store(boolean store) {
flags.set(Flag.Store, store);
return this;
}
public boolean store() {
return flags.isSet(Flag.Store);
}
public IndicesStatsRequest indexing(boolean indexing) {
flags.set(Flag.Indexing, indexing);
return this;
}
public boolean indexing() {
return flags.isSet(Flag.Indexing);
}
public IndicesStatsRequest get(boolean get) {
flags.set(Flag.Get, get);
return this;
}
public boolean get() {
return flags.isSet(Flag.Get);
}
public IndicesStatsRequest search(boolean search) {
flags.set(Flag.Search, search);
return this;
}
public boolean search() {
return flags.isSet(Flag.Search);
}
public IndicesStatsRequest merge(boolean merge) {
flags.set(Flag.Merge, merge);
return this;
}
public boolean merge() {
return flags.isSet(Flag.Merge);
}
public IndicesStatsRequest refresh(boolean refresh) {
flags.set(Flag.Refresh, refresh);
return this;
}
public boolean refresh() {
return flags.isSet(Flag.Refresh);
}
public IndicesStatsRequest flush(boolean flush) {
flags.set(Flag.Flush, flush);
return this;
}
public boolean flush() {
return flags.isSet(Flag.Flush);
}
public IndicesStatsRequest warmer(boolean warmer) {
flags.set(Flag.Warmer, warmer);
return this;
}
public boolean warmer() {
return flags.isSet(Flag.Warmer);
}
public IndicesStatsRequest queryCache(boolean queryCache) {
flags.set(Flag.QueryCache, queryCache);
return this;
}
public boolean queryCache() {
return flags.isSet(Flag.QueryCache);
}
public IndicesStatsRequest fieldData(boolean fieldData) {
flags.set(Flag.FieldData, fieldData);
return this;
}
public boolean fieldData() {
return flags.isSet(Flag.FieldData);
}
public IndicesStatsRequest segments(boolean segments) {
flags.set(Flag.Segments, segments);
return this;
}
public boolean segments() {
return flags.isSet(Flag.Segments);
}
public IndicesStatsRequest fieldDataFields(String... fieldDataFields) {
flags.fieldDataFields(fieldDataFields);
return this;
}
public String[] fieldDataFields() {
return flags.fieldDataFields();
}
public IndicesStatsRequest completion(boolean completion) {
flags.set(Flag.Completion, completion);
return this;
}
public boolean completion() {
return flags.isSet(Flag.Completion);
}
public IndicesStatsRequest completionFields(String... completionDataFields) {
flags.completionDataFields(completionDataFields);
return this;
}
public String[] completionFields() {
return flags.completionDataFields();
}
public IndicesStatsRequest translog(boolean translog) {
flags.set(Flag.Translog, translog);
return this;
}
public boolean translog() {
return flags.isSet(Flag.Translog);
}
public IndicesStatsRequest requestCache(boolean requestCache) {
flags.set(Flag.RequestCache, requestCache);
return this;
}
public boolean requestCache() {
return flags.isSet(Flag.RequestCache);
}
public IndicesStatsRequest recovery(boolean recovery) {
flags.set(Flag.Recovery, recovery);
return this;
}
public boolean recovery() {
return flags.isSet(Flag.Recovery);
}
public IndicesStatsRequest bulk(boolean bulk) {
flags.set(Flag.Bulk, bulk);
return this;
}
public boolean bulk() {
return flags.isSet(Flag.Bulk);
}
public boolean includeSegmentFileSizes() {
return flags.includeSegmentFileSizes();
}
public IndicesStatsRequest includeSegmentFileSizes(boolean includeSegmentFileSizes) {
flags.includeSegmentFileSizes(includeSegmentFileSizes);
return this;
}
public IndicesStatsRequest includeUnloadedSegments(boolean includeUnloadedSegments) {
flags.includeUnloadedSegments(includeUnloadedSegments);
return this;
}
public IndicesStatsRequest denseVector(boolean denseVector) {
flags.set(Flag.DenseVector, denseVector);
return this;
}
public boolean denseVector() {
return flags.isSet(Flag.DenseVector);
}
public IndicesStatsRequest sparseVector(boolean sparseVector) {
flags.set(Flag.SparseVector, sparseVector);
return this;
}
public boolean sparseVector() {
return flags.isSet(Flag.SparseVector);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
flags.writeTo(out);
}
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
return new CancellableTask(id, type, action, "", parentTaskId, headers);
}
}
|
IndicesStatsRequest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/utils/TransformStrings.java
|
{
"start": 454,
"end": 1558
}
|
class ____ {
/**
* Valid user id pattern.
* Matches a string that contains lowercase characters, digits, hyphens, underscores or dots.
* The string may start and end only in characters or digits.
* Note that '.' is allowed but not documented.
*/
private static final Pattern VALID_ID_CHAR_PATTERN = Pattern.compile("[a-z0-9](?:[a-z0-9_\\-\\.]*[a-z0-9])?");
public static final int ID_LENGTH_LIMIT = 64;
private TransformStrings() {}
public static boolean isValidId(String id) {
return id != null && VALID_ID_CHAR_PATTERN.matcher(id).matches() && Metadata.ALL.equals(id) == false;
}
/**
* Checks if the given {@code id} has a valid length.
* We keep IDs in a length shorter or equal than {@link #ID_LENGTH_LIMIT}
* in order to avoid unfriendly errors when storing docs with
* more than 512 bytes.
*
* @param id the id
* @return {@code true} if the id has a valid length
*/
public static boolean hasValidLengthForId(String id) {
return id.length() <= ID_LENGTH_LIMIT;
}
}
|
TransformStrings
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/InconsistentCapitalizationTest.java
|
{
"start": 7125,
"end": 7383
}
|
class ____ {
Object aa;
Test(Object aA) {
aa = aA;
if (aA == aa) {}
}
}
""")
.addOutputLines(
"out/Test.java",
"""
|
Test
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/header/writers/StaticHeaderWriterTests.java
|
{
"start": 1281,
"end": 4059
}
|
class ____ {
private MockHttpServletRequest request;
private MockHttpServletResponse response;
@BeforeEach
public void setup() {
this.request = new MockHttpServletRequest();
this.response = new MockHttpServletResponse();
}
@Test
public void constructorNullHeaders() {
assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter(null));
}
@Test
public void constructorEmptyHeaders() {
assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter(Collections.<Header>emptyList()));
}
@Test
public void constructorNullHeaderName() {
assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter(null, "value1"));
}
@Test
public void constructorNullHeaderValues() {
assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter("name", (String[]) null));
}
@Test
public void constructorContainsNullHeaderValue() {
assertThatIllegalArgumentException().isThrownBy(() -> new StaticHeadersWriter("name", "value1", null));
}
@Test
public void sameHeaderShouldBeReturned() {
String headerName = "X-header";
String headerValue = "foo";
StaticHeadersWriter factory = new StaticHeadersWriter(headerName, headerValue);
factory.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderValues(headerName)).isEqualTo(Arrays.asList(headerValue));
}
@Test
public void writeHeadersMulti() {
Header pragma = new Header("Pragma", "no-cache");
Header cacheControl = new Header("Cache-Control", "no-cache", "no-store", "must-revalidate");
StaticHeadersWriter factory = new StaticHeadersWriter(Arrays.asList(pragma, cacheControl));
factory.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(2);
assertThat(this.response.getHeaderValues(pragma.getName())).isEqualTo(pragma.getValues());
assertThat(this.response.getHeaderValues(cacheControl.getName())).isEqualTo(cacheControl.getValues());
}
@Test
public void writeHeaderWhenNotPresent() {
String pragmaValue = new String("pragmaValue");
String cacheControlValue = new String("cacheControlValue");
this.response.setHeader("Pragma", pragmaValue);
this.response.setHeader("Cache-Control", cacheControlValue);
Header pragma = new Header("Pragma", "no-cache");
Header cacheControl = new Header("Cache-Control", "no-cache", "no-store", "must-revalidate");
StaticHeadersWriter factory = new StaticHeadersWriter(Arrays.asList(pragma, cacheControl));
factory.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderNames()).hasSize(2);
assertThat(this.response.getHeader("Pragma")).isSameAs(pragmaValue);
assertThat(this.response.getHeader("Cache-Control")).isSameAs(cacheControlValue);
}
}
|
StaticHeaderWriterTests
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/Branched.java
|
{
"start": 969,
"end": 1155
}
|
class ____ used to define the optional parameters when building branches with
* {@link BranchedKStream}.
*
* @param <K> type of record key
* @param <V> type of record value
*/
public
|
is
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/build-support/src/main/java/org/jboss/resteasy/reactive/build/support/AllWriteableMessageBodyWriterByteBuddyPlugin.java
|
{
"start": 5290,
"end": 6655
}
|
class ____ extends MethodVisitor {
private final AtomicBoolean result;
private int insnCount = 0;
private boolean firstIsLoad1OnToStack = false;
private boolean secondIsIReturn = false;
private final CodeSizeEvaluator codeSizeEvaluator;
private MessageBodyWriterIsWriteableMethodVisitor(CodeSizeEvaluator superMethodVisitor, AtomicBoolean result) {
super(OpenedClassReader.ASM_API, superMethodVisitor);
this.codeSizeEvaluator = superMethodVisitor;
this.result = result;
}
@Override
public void visitInsn(int opcode) {
insnCount++;
if ((opcode == Opcodes.ICONST_1) && insnCount == 1) {
firstIsLoad1OnToStack = true;
} else if ((opcode == Opcodes.IRETURN) && insnCount == 2) {
secondIsIReturn = true;
}
super.visitInsn(opcode);
}
@Override
public void visitEnd() {
super.visitEnd();
result.set(
(insnCount == 2) && firstIsLoad1OnToStack && (secondIsIReturn) &&
// ensures that no other instruction was visited
(codeSizeEvaluator.getMaxSize() == 2) && (codeSizeEvaluator.getMinSize() == 2));
}
}
}
|
MessageBodyWriterIsWriteableMethodVisitor
|
java
|
quarkusio__quarkus
|
extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/services/ServiceInitiatorsTest.java
|
{
"start": 445,
"end": 1874
}
|
class ____ {
private static final Map<String, String> HR_SERVICES = toServicesMap(ReactiveServiceInitiators.LIST);
private static final Map<String, String> ORM_SERVICES = toServicesMap(StandardServiceInitiators.LIST);
private static final Map<String, String> QUARKUS_HR_SERVICES = toServicesMap(ReactiveServiceInitiators.LIST);
// These services are NOT provided by the Hibernate Reactive default initiators, and that should be fine:
private static final Set<String> HR_INTENTIONALLY_OMITTED = Set
.of("org.hibernate.engine.transaction.jta.platform.spi.JtaPlatformResolver");
@Test
public void serviceInitiatorsAreUnique() {
Assertions.assertEquals(HR_SERVICES.size(), ReactiveServiceInitiators.LIST.size());
Assertions.assertEquals(ORM_SERVICES.size(), StandardServiceInitiators.LIST.size());
Assertions.assertEquals(ORM_SERVICES.size(), StandardServiceInitiators.LIST.size());
}
private static Map<String, String> toServicesMap(List<StandardServiceInitiator<?>> list) {
TreeMap<String, String> rolesToImplMap = new TreeMap<>();
for (StandardServiceInitiator<?> initiator : list) {
final String serviceRole = initiator.getServiceInitiated().getName();
rolesToImplMap.put(serviceRole, initiator.getClass().getName());
}
return Collections.unmodifiableMap(rolesToImplMap);
}
}
|
ServiceInitiatorsTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ComparableTypeTest.java
|
{
"start": 2660,
"end": 2781
}
|
class ____ extends A implements Comparable<B> {
@Override
public int compareTo(B o) {
return 0;
}
}
|
C
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/foreach/noqualifier/Foo.java
|
{
"start": 215,
"end": 322
}
|
class ____ implements MyService {
@Override
public String getName() {
return "foo";
}
}
|
Foo
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/Spr15275Tests.java
|
{
"start": 4199,
"end": 4703
}
|
class ____ {
@Bean
public FactoryBean<FooInterface> foo() {
return new AbstractFactoryBean<>() {
@Override
public FooInterface createInstance() {
return new Foo("x");
}
@Override
public Class<?> getObjectType() {
return FooInterface.class;
}
};
}
@Bean
public Bar bar() throws Exception {
assertThat(foo().isSingleton()).isTrue();
return new Bar(foo().getObject());
}
}
@Configuration
protected static
|
ConfigWithAbstractFactoryBeanForInterface
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ForwardingNavigableSet.java
|
{
"start": 1599,
"end": 2400
}
|
class ____ <i>not</i> forward calls to {@code
* default} methods. Instead, it inherits their default implementations. When those implementations
* invoke methods, they invoke methods on the {@code ForwardingNavigableSet}.
*
* <p>Each of the {@code standard} methods uses the set's comparator (or the natural ordering of the
* elements, if there is no comparator) to test element equality. As a result, if the comparator is
* not consistent with equals, some of the standard implementations may violate the {@code Set}
* contract.
*
* <p>The {@code standard} methods and the collection views they return are not guaranteed to be
* thread-safe, even when all of the methods that they depend on are thread-safe.
*
* @author Louis Wasserman
* @since 12.0
*/
@GwtIncompatible
public abstract
|
does
|
java
|
dropwizard__dropwizard
|
dropwizard-jersey/src/main/java/io/dropwizard/jersey/DropwizardResourceConfig.java
|
{
"start": 5542,
"end": 6060
}
|
class ____ passed through as an object, cast to Class and register directly
if (component instanceof Class<?> classObj) {
return super.register(classObj);
} else if (Providers.isProvider(clazz) || org.glassfish.hk2.utilities.Binder.class.isAssignableFrom(clazz)) {
// If Jersey supports this component's class (including Binders), register directly
return super.register(object);
} else {
// Else register a binder that binds the instance to its
|
gets
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/util/ObjectArrayPriorityQueue.java
|
{
"start": 1122,
"end": 1312
}
|
class ____ track the {@link ObjectArray} and not the memory usage of the elements. Furthermore,
* the elements are not closed even if they implement {@link Releasable}.
*/
public abstract
|
only
|
java
|
netty__netty
|
codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2HeadersDecoder.java
|
{
"start": 1056,
"end": 9560
}
|
class ____ implements Http2HeadersDecoder, Http2HeadersDecoder.Configuration {
private static final float HEADERS_COUNT_WEIGHT_NEW = 1 / 5f;
private static final float HEADERS_COUNT_WEIGHT_HISTORICAL = 1 - HEADERS_COUNT_WEIGHT_NEW;
private final HpackDecoder hpackDecoder;
private final boolean validateHeaders;
private final boolean validateHeaderValues;
private long maxHeaderListSizeGoAway;
/**
* Used to calculate an exponential moving average of header sizes to get an estimate of how large the data
* structure for storing headers should be.
*/
private float headerArraySizeAccumulator = 8;
public DefaultHttp2HeadersDecoder() {
this(true);
}
/**
* Create a new instance.
* @param validateHeaders {@code true} to validate headers are valid according to the RFC.
*/
public DefaultHttp2HeadersDecoder(boolean validateHeaders) {
this(validateHeaders, DEFAULT_HEADER_LIST_SIZE);
}
/**
* Create a new instance.
*
* @param validateHeaders {@code true} to validate headers are valid according to the RFC.
* This validates everything except header values.
* @param validateHeaderValues {@code true} to validate that header <em>values</em> are valid according to the RFC.
* Since this is potentially expensive, it can be enabled separately from {@code validateHeaders}.
*/
public DefaultHttp2HeadersDecoder(boolean validateHeaders, boolean validateHeaderValues) {
this(validateHeaders, validateHeaderValues, DEFAULT_HEADER_LIST_SIZE);
}
/**
* Create a new instance.
* @param validateHeaders {@code true} to validate headers are valid according to the RFC.
* @param maxHeaderListSize This is the only setting that can be configured before notifying the peer.
* This is because <a href="https://tools.ietf.org/html/rfc7540#section-6.5.1">SETTINGS_MAX_HEADER_LIST_SIZE</a>
* allows a lower than advertised limit from being enforced, and the default limit is unlimited
* (which is dangerous).
*/
public DefaultHttp2HeadersDecoder(boolean validateHeaders, long maxHeaderListSize) {
this(validateHeaders, false, new HpackDecoder(maxHeaderListSize));
}
/**
* Create a new instance.
* @param validateHeaders {@code true} to validate headers are valid according to the RFC.
* This validates everything except header values.
* @param validateHeaderValues {@code true} to validate that header <em>values</em> are valid according to the RFC.
* Since this is potentially expensive, it can be enabled separately from {@code validateHeaders}.
* @param maxHeaderListSize This is the only setting that can be configured before notifying the peer.
* This is because <a href="https://tools.ietf.org/html/rfc7540#section-6.5.1">SETTINGS_MAX_HEADER_LIST_SIZE</a>
* allows a lower than advertised limit from being enforced, and the default limit is unlimited
* (which is dangerous).
*/
public DefaultHttp2HeadersDecoder(boolean validateHeaders, boolean validateHeaderValues, long maxHeaderListSize) {
this(validateHeaders, validateHeaderValues, new HpackDecoder(maxHeaderListSize));
}
/**
* Create a new instance.
* @param validateHeaders {@code true} to validate headers are valid according to the RFC.
* This validates everything except header values.
* @param maxHeaderListSize This is the only setting that can be configured before notifying the peer.
* This is because <a href="https://tools.ietf.org/html/rfc7540#section-6.5.1">SETTINGS_MAX_HEADER_LIST_SIZE</a>
* allows a lower than advertised limit from being enforced, and the default limit is unlimited
* (which is dangerous).
* @param initialHuffmanDecodeCapacity Does nothing, do not use.
*/
public DefaultHttp2HeadersDecoder(boolean validateHeaders, long maxHeaderListSize,
@Deprecated int initialHuffmanDecodeCapacity) {
this(validateHeaders, false, new HpackDecoder(maxHeaderListSize));
}
/**
* Exposed for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing.
*/
DefaultHttp2HeadersDecoder(boolean validateHeaders, boolean validateHeaderValues, HpackDecoder hpackDecoder) {
this.hpackDecoder = ObjectUtil.checkNotNull(hpackDecoder, "hpackDecoder");
this.validateHeaders = validateHeaders;
this.validateHeaderValues = validateHeaderValues;
maxHeaderListSizeGoAway =
Http2CodecUtil.calculateMaxHeaderListSizeGoAway(hpackDecoder.getMaxHeaderListSize());
}
@Override
public void maxHeaderTableSize(long max) throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(max);
}
@Override
public long maxHeaderTableSize() {
return hpackDecoder.getMaxHeaderTableSize();
}
@Override
public void maxHeaderListSize(long max, long goAwayMax) throws Http2Exception {
if (goAwayMax < max || goAwayMax < 0) {
throw connectionError(INTERNAL_ERROR, "Header List Size GO_AWAY %d must be non-negative and >= %d",
goAwayMax, max);
}
hpackDecoder.setMaxHeaderListSize(max);
maxHeaderListSizeGoAway = goAwayMax;
}
@Override
public long maxHeaderListSize() {
return hpackDecoder.getMaxHeaderListSize();
}
@Override
public long maxHeaderListSizeGoAway() {
return maxHeaderListSizeGoAway;
}
@Override
public Configuration configuration() {
return this;
}
@Override
public Http2Headers decodeHeaders(int streamId, ByteBuf headerBlock) throws Http2Exception {
try {
final Http2Headers headers = newHeaders();
hpackDecoder.decode(streamId, headerBlock, headers, validateHeaders);
headerArraySizeAccumulator = HEADERS_COUNT_WEIGHT_NEW * headers.size() +
HEADERS_COUNT_WEIGHT_HISTORICAL * headerArraySizeAccumulator;
return headers;
} catch (Http2Exception e) {
throw e;
} catch (Throwable e) {
// Default handler for any other types of errors that may have occurred. For example,
// the Header builder throws IllegalArgumentException if the key or value was invalid
// for any reason (e.g. the key was an invalid pseudo-header).
throw connectionError(COMPRESSION_ERROR, e, "Error decoding headers: %s", e.getMessage());
}
}
/**
* A weighted moving average estimating how many headers are expected during the decode process.
* @return an estimate of how many headers are expected during the decode process.
*/
protected final int numberOfHeadersGuess() {
return (int) headerArraySizeAccumulator;
}
/**
* Determines if the headers should be validated as a result of the decode operation.
* <p>
* <strong>Note:</strong> This does not include validation of header <em>values</em>, since that is potentially
* expensive to do. Value validation is instead {@linkplain #validateHeaderValues() enabled separately}.
*
* @return {@code true} if the headers should be validated as a result of the decode operation.
*/
protected final boolean validateHeaders() {
return validateHeaders;
}
/**
* Determines if the header values should be validated as a result of the decode operation.
* <p>
* <strong>Note:</strong> This <em>only</em> validates the values of headers. All other header validations are
* instead {@linkplain #validateHeaders() enabled separately}.
*
* @return {@code true} if the header values should be validated as a result of the decode operation.
*/
protected boolean validateHeaderValues() { // Not 'final' due to backwards compatibility.
return validateHeaderValues;
}
/**
* Create a new {@link Http2Headers} object which will store the results of the decode operation.
* @return a new {@link Http2Headers} object which will store the results of the decode operation.
*/
protected Http2Headers newHeaders() {
return new DefaultHttp2Headers(validateHeaders, validateHeaderValues, (int) headerArraySizeAccumulator);
}
}
|
DefaultHttp2HeadersDecoder
|
java
|
apache__flink
|
flink-test-utils-parent/flink-connector-test-utils/src/test/java/org/apache/flink/connector/testframe/utils/CollectIteratorAssertTest.java
|
{
"start": 1446,
"end": 7172
}
|
class ____ {
private final List<String> splitA = Arrays.asList("alpha", "beta", "gamma");
private final List<String> splitB = Arrays.asList("one", "two", "three");
private final List<String> splitC = Arrays.asList("1", "2", "3");
private final List<List<String>> testDataCollection = Arrays.asList(splitA, splitB, splitC);
@Test
public void testDataMatcherWithExactlyOnceSemantic() {
final List<String> result = unionLists(splitA, splitB, splitC);
assertThat(result.iterator())
.matchesRecordsFromSource(testDataCollection, CheckpointingMode.EXACTLY_ONCE);
}
@Test
public void testDataMatcherWithAtLeastOnceSemantic() {
final List<String> result = unionLists(splitA, splitB, splitC, splitA);
assertThat(result.iterator())
.matchesRecordsFromSource(testDataCollection, CheckpointingMode.AT_LEAST_ONCE);
}
@Test
public void testResultLessThanExpected() {
final ArrayList<String> splitATestDataWithoutLast = new ArrayList<>(splitA);
splitATestDataWithoutLast.remove(splitA.size() - 1);
final List<String> result = unionLists(splitATestDataWithoutLast, splitB, splitC);
Assertions.assertThatThrownBy(
() ->
assertThat(result.iterator())
.matchesRecordsFromSource(
testDataCollection,
CheckpointingMode.EXACTLY_ONCE))
.hasMessageContaining(
"Expected to have exactly 9 records in result, but only received 8 records\n"
+ "Current progress of multiple split test data validation:\n"
+ "Split 0 (2/3): \n"
+ "alpha\n"
+ "beta\n"
+ "gamma\t<----\n"
+ "Split 1 (3/3): \n"
+ "one\n"
+ "two\n"
+ "three\n"
+ "Split 2 (3/3): \n"
+ "1\n"
+ "2\n"
+ "3\n");
}
@Test
public void testResultMoreThanExpected() {
final List<String> result = unionLists(splitA, splitB, splitC);
result.add("delta");
Assertions.assertThatThrownBy(
() ->
assertThat(result.iterator())
.matchesRecordsFromSource(
testDataCollection,
CheckpointingMode.EXACTLY_ONCE))
.hasMessageContaining(
"Expected to have exactly 9 records in result, but received more records\n"
+ "Current progress of multiple split test data validation:\n"
+ "Split 0 (3/3): \n"
+ "alpha\n"
+ "beta\n"
+ "gamma\n"
+ "Split 1 (3/3): \n"
+ "one\n"
+ "two\n"
+ "three\n"
+ "Split 2 (3/3): \n"
+ "1\n"
+ "2\n"
+ "3\n");
}
@Test
public void testOutOfOrder() {
List<String> reverted = new ArrayList<>(splitC);
Collections.reverse(reverted);
final List<String> result = unionLists(splitA, splitB, reverted);
Assertions.assertThatThrownBy(
() ->
assertThat(result.iterator())
.matchesRecordsFromSource(
testDataCollection,
CheckpointingMode.EXACTLY_ONCE))
.hasMessageContaining(
"Unexpected record '3' at position 6\n"
+ "Current progress of multiple split test data validation:\n"
+ "Split 0 (3/3): \n"
+ "alpha\n"
+ "beta\n"
+ "gamma\n"
+ "Split 1 (3/3): \n"
+ "one\n"
+ "two\n"
+ "three\n"
+ "Split 2 (0/3): \n"
+ "1\t<----\n"
+ "2\n"
+ "3\n"
+ "Remaining received elements after the unexpected one: \n"
+ "2\n"
+ "1\n");
}
}
@SafeVarargs
private final <T> List<T> unionLists(List<T>... lists) {
return Stream.of(lists).flatMap(Collection::stream).collect(Collectors.toList());
}
}
|
MultipleSplitDataMatcherTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/strategies/ToTimestampLtzTypeStrategyTest.java
|
{
"start": 1069,
"end": 6085
}
|
class ____ extends TypeStrategiesTestBase {
@Override
protected Stream<TestSpec> testData() {
return Stream.of(
TestSpec.forStrategy(
"Valid single argument of type <VARCHAR> or <CHAR>",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.STRING())
.expectDataType(DataTypes.TIMESTAMP_LTZ(3).nullable()),
TestSpec.forStrategy(
"TO_TIMESTAMP_LTZ(<NUMERIC>)",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.BIGINT())
.expectDataType(DataTypes.TIMESTAMP_LTZ(3).nullable()),
TestSpec.forStrategy(
"Invalid single argument type",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.BOOLEAN())
.expectErrorMessage(
"Unsupported argument type. When taking 1 argument, TO_TIMESTAMP_LTZ accepts an argument of type <VARCHAR>, <CHAR>, or <NUMERIC>."),
TestSpec.forStrategy(
"TO_TIMESTAMP_LTZ(<NUMERIC>, <INTEGER>)",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.DOUBLE(), DataTypes.INT())
.expectDataType(DataTypes.TIMESTAMP_LTZ(3).nullable()),
TestSpec.forStrategy(
"Valid two arguments of <VARCHAR> or <CHAR>",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.STRING(), DataTypes.STRING())
.expectDataType(DataTypes.TIMESTAMP_LTZ(3).nullable()),
TestSpec.forStrategy(
"Invalid second argument when the first argument is <NUMERIC>",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.BIGINT(), DataTypes.STRING())
.expectErrorMessage(
"Unsupported argument type. TO_TIMESTAMP_LTZ(<NUMERIC>, <INTEGER>) requires the second argument to be <INTEGER>."),
TestSpec.forStrategy(
"Invalid second argument when the first argument is <VARCHAR> or <CHAR>",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.STRING(), DataTypes.FLOAT())
.expectErrorMessage(
"Unsupported argument type. If the first argument is of type <VARCHAR> or <CHAR>, TO_TIMESTAMP_LTZ requires the second argument to be of type <VARCHAR> or <CHAR>."),
TestSpec.forStrategy(
"Invalid first argument when taking 2 arguments",
SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.BOOLEAN(), DataTypes.FLOAT())
.expectErrorMessage(
"Unsupported argument type. When taking 2 arguments, TO_TIMESTAMP_LTZ requires the first argument to be of type <VARCHAR>, <CHAR>, or <NUMERIC>."),
TestSpec.forStrategy(
"Valid three arguments", SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING())
.expectDataType(DataTypes.TIMESTAMP_LTZ(3).nullable()),
TestSpec.forStrategy(
"Invalid three arguments", SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(DataTypes.STRING(), DataTypes.INT(), DataTypes.STRING())
.expectErrorMessage(
"Unsupported argument type. When taking 3 arguments, TO_TIMESTAMP_LTZ requires all three arguments to be of type <VARCHAR> or <CHAR>."),
TestSpec.forStrategy("No arguments", SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes()
.expectErrorMessage(
"Unsupported argument type. TO_TIMESTAMP_LTZ requires 1 to 3 arguments, but 0 were provided."),
TestSpec.forStrategy("Too many arguments", SpecificTypeStrategies.TO_TIMESTAMP_LTZ)
.inputTypes(
DataTypes.STRING(),
DataTypes.STRING(),
DataTypes.STRING(),
DataTypes.STRING())
.expectErrorMessage(
"Unsupported argument type. TO_TIMESTAMP_LTZ requires 1 to 3 arguments, but 4 were provided."));
}
}
|
ToTimestampLtzTypeStrategyTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/demo/sql/PGVisitorDemo.java
|
{
"start": 1008,
"end": 1435
}
|
class ____ extends PGASTVisitorAdapter {
private Map<String, SQLTableSource> aliasMap = new HashMap<String, SQLTableSource>();
public boolean visit(SQLExprTableSource x) {
String alias = x.getAlias();
aliasMap.put(alias, x);
return true;
}
public Map<String, SQLTableSource> getAliasMap() {
return aliasMap;
}
}
}
|
ExportTableAliasVisitor
|
java
|
grpc__grpc-java
|
util/src/main/java/io/grpc/util/OutlierDetectionLoadBalancerProvider.java
|
{
"start": 1270,
"end": 6372
}
|
class ____ extends LoadBalancerProvider {
@Override
public LoadBalancer newLoadBalancer(Helper helper) {
return new OutlierDetectionLoadBalancer(helper, Ticker.systemTicker());
}
@Override
public boolean isAvailable() {
return true;
}
@Override
public int getPriority() {
return 5;
}
@Override
public String getPolicyName() {
return "outlier_detection_experimental";
}
@Override
public ConfigOrError parseLoadBalancingPolicyConfig(Map<String, ?> rawConfig) {
try {
return parseLoadBalancingPolicyConfigInternal(rawConfig);
} catch (RuntimeException e) {
return ConfigOrError.fromError(
Status.UNAVAILABLE.withCause(e).withDescription(
"Failed parsing configuration for " + getPolicyName()));
}
}
private ConfigOrError parseLoadBalancingPolicyConfigInternal(Map<String, ?> rawConfig) {
// Common configuration.
Long intervalNanos = JsonUtil.getStringAsDuration(rawConfig, "interval");
Long baseEjectionTimeNanos = JsonUtil.getStringAsDuration(rawConfig, "baseEjectionTime");
Long maxEjectionTimeNanos = JsonUtil.getStringAsDuration(rawConfig, "maxEjectionTime");
Integer maxEjectionPercentage = JsonUtil.getNumberAsInteger(rawConfig,
"maxEjectionPercentage");
OutlierDetectionLoadBalancerConfig.Builder configBuilder
= new OutlierDetectionLoadBalancerConfig.Builder();
if (intervalNanos != null) {
configBuilder.setIntervalNanos(intervalNanos);
}
if (baseEjectionTimeNanos != null) {
configBuilder.setBaseEjectionTimeNanos(baseEjectionTimeNanos);
}
if (maxEjectionTimeNanos != null) {
configBuilder.setMaxEjectionTimeNanos(maxEjectionTimeNanos);
}
if (maxEjectionPercentage != null) {
configBuilder.setMaxEjectionPercent(maxEjectionPercentage);
}
// Success rate ejection specific configuration.
Map<String, ?> rawSuccessRateEjection = JsonUtil.getObject(rawConfig, "successRateEjection");
if (rawSuccessRateEjection != null) {
SuccessRateEjection.Builder successRateEjectionBuilder = new SuccessRateEjection.Builder();
Integer stdevFactor = JsonUtil.getNumberAsInteger(rawSuccessRateEjection, "stdevFactor");
Integer enforcementPercentage = JsonUtil.getNumberAsInteger(rawSuccessRateEjection,
"enforcementPercentage");
Integer minimumHosts = JsonUtil.getNumberAsInteger(rawSuccessRateEjection, "minimumHosts");
Integer requestVolume = JsonUtil.getNumberAsInteger(rawSuccessRateEjection, "requestVolume");
if (stdevFactor != null) {
successRateEjectionBuilder.setStdevFactor(stdevFactor);
}
if (enforcementPercentage != null) {
successRateEjectionBuilder.setEnforcementPercentage(enforcementPercentage);
}
if (minimumHosts != null) {
successRateEjectionBuilder.setMinimumHosts(minimumHosts);
}
if (requestVolume != null) {
successRateEjectionBuilder.setRequestVolume(requestVolume);
}
configBuilder.setSuccessRateEjection(successRateEjectionBuilder.build());
}
// Failure percentage ejection specific configuration.
Map<String, ?> rawFailurePercentageEjection = JsonUtil.getObject(rawConfig,
"failurePercentageEjection");
if (rawFailurePercentageEjection != null) {
FailurePercentageEjection.Builder failurePercentageEjectionBuilder
= new FailurePercentageEjection.Builder();
Integer threshold = JsonUtil.getNumberAsInteger(rawFailurePercentageEjection, "threshold");
Integer enforcementPercentage = JsonUtil.getNumberAsInteger(rawFailurePercentageEjection,
"enforcementPercentage");
Integer minimumHosts = JsonUtil.getNumberAsInteger(rawFailurePercentageEjection,
"minimumHosts");
Integer requestVolume = JsonUtil.getNumberAsInteger(rawFailurePercentageEjection,
"requestVolume");
if (threshold != null) {
failurePercentageEjectionBuilder.setThreshold(threshold);
}
if (enforcementPercentage != null) {
failurePercentageEjectionBuilder.setEnforcementPercentage(enforcementPercentage);
}
if (minimumHosts != null) {
failurePercentageEjectionBuilder.setMinimumHosts(minimumHosts);
}
if (requestVolume != null) {
failurePercentageEjectionBuilder.setRequestVolume(requestVolume);
}
configBuilder.setFailurePercentageEjection(failurePercentageEjectionBuilder.build());
}
// Child load balancer configuration.
ConfigOrError childConfig = GracefulSwitchLoadBalancer.parseLoadBalancingPolicyConfig(
JsonUtil.getListOfObjects(rawConfig, "childPolicy"));
if (childConfig.getError() != null) {
return ConfigOrError.fromError(GrpcUtil.statusWithDetails(
Status.Code.UNAVAILABLE,
"Failed to parse child in outlier_detection_experimental",
childConfig.getError()));
}
configBuilder.setChildConfig(childConfig.getConfig());
return ConfigOrError.fromConfig(configBuilder.build());
}
}
|
OutlierDetectionLoadBalancerProvider
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/metrics/StateTimeMetricTest.java
|
{
"start": 5428,
"end": 6329
}
|
class ____ {
@Nullable private final Gauge<Long> state;
@Nullable private final Gauge<Long> currentTime;
@Nullable private final Gauge<Long> totalTime;
private StatusMetricSet(
@Nullable Gauge<Long> state,
@Nullable Gauge<Long> currentTime,
@Nullable Gauge<Long> totalTime) {
this.state = state;
this.currentTime = currentTime;
this.totalTime = totalTime;
}
@Nullable
public Optional<Gauge<Long>> getState() {
return Optional.ofNullable(state);
}
@Nullable
public Optional<Gauge<Long>> getCurrentTime() {
return Optional.ofNullable(currentTime);
}
@Nullable
public Optional<Gauge<Long>> getTotalTime() {
return Optional.ofNullable(totalTime);
}
}
}
|
StatusMetricSet
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFlatMapCompletableCompletable.java
|
{
"start": 1393,
"end": 2401
}
|
class ____<T> extends Completable implements FuseToFlowable<T> {
final Flowable<T> source;
final Function<? super T, ? extends CompletableSource> mapper;
final int maxConcurrency;
final boolean delayErrors;
public FlowableFlatMapCompletableCompletable(Flowable<T> source,
Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
int maxConcurrency) {
this.source = source;
this.mapper = mapper;
this.delayErrors = delayErrors;
this.maxConcurrency = maxConcurrency;
}
@Override
protected void subscribeActual(CompletableObserver observer) {
source.subscribe(new FlatMapCompletableMainSubscriber<>(observer, mapper, delayErrors, maxConcurrency));
}
@Override
public Flowable<T> fuseToFlowable() {
return RxJavaPlugins.onAssembly(new FlowableFlatMapCompletable<>(source, mapper, delayErrors, maxConcurrency));
}
static final
|
FlowableFlatMapCompletableCompletable
|
java
|
alibaba__nacos
|
core/src/main/java/com/alibaba/nacos/core/utils/ReuseHttpServletRequest.java
|
{
"start": 1410,
"end": 4287
}
|
class ____ extends HttpServletRequestWrapper implements ReuseHttpRequest {
private final HttpServletRequest target;
private byte[] body;
private Map<String, String[]> stringMap;
/**
* Constructs a request object wrapping the given request.
*
* @param request The request to wrap
* @throws IllegalArgumentException if the request is null
*/
public ReuseHttpServletRequest(HttpServletRequest request) throws IOException {
super(request);
this.target = request;
this.body = toBytes(request.getInputStream());
this.stringMap = toDuplication(request);
}
@Override
public Object getBody() throws Exception {
if (StringUtils.containsIgnoreCase(target.getContentType(), MediaType.MULTIPART_FORM_DATA)) {
return target.getParts();
} else {
String s = ByteUtils.toString(body);
if (StringUtils.isBlank(s)) {
return HttpUtils
.encodingParams(HttpUtils.translateParameterMap(stringMap), StandardCharsets.UTF_8.name());
}
return s;
}
}
private byte[] toBytes(InputStream inputStream) throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
byte[] buffer = new byte[1024];
int n = 0;
while ((n = inputStream.read(buffer)) != -1) {
bos.write(buffer, 0, n);
}
return bos.toByteArray();
}
@Override
public BufferedReader getReader() throws IOException {
return new BufferedReader(new InputStreamReader(getInputStream()));
}
@Override
public Map<String, String[]> getParameterMap() {
return stringMap;
}
@Override
public String getParameter(String name) {
String[] values = stringMap.get(name);
if (values == null || values.length == 0) {
return null;
}
return values[0];
}
@Override
public String[] getParameterValues(String name) {
return stringMap.get(name);
}
@Override
public ServletInputStream getInputStream() throws IOException {
final ByteArrayInputStream inputStream = new ByteArrayInputStream(body);
return new ServletInputStream() {
@Override
public int read() throws IOException {
return inputStream.read();
}
@Override
public boolean isFinished() {
return false;
}
@Override
public boolean isReady() {
return false;
}
@Override
public void setReadListener(ReadListener readListener) {
}
};
}
}
|
ReuseHttpServletRequest
|
java
|
resilience4j__resilience4j
|
resilience4j-rxjava2/src/main/java/io/github/resilience4j/timelimiter/transformer/TimeLimiterTransformer.java
|
{
"start": 815,
"end": 3152
}
|
class ____<T> implements FlowableTransformer<T, T>,
ObservableTransformer<T, T>,
SingleTransformer<T, T>, CompletableTransformer, MaybeTransformer<T, T> {
private final TimeLimiter timeLimiter;
private TimeLimiterTransformer(TimeLimiter timeLimiter) {
this.timeLimiter = timeLimiter;
}
/**
* Creates a TimeLimiterTransformer.
*
* @param timeLimiter the TimeLimiter
* @param <T> the value type of the upstream and downstream
* @return a TimeLimiterTransformer
*/
public static <T> TimeLimiterTransformer<T> of(TimeLimiter timeLimiter) {
return new TimeLimiterTransformer<>(timeLimiter);
}
@Override
public Publisher<T> apply(Flowable<T> upstream) {
return upstream
.timeout(getTimeoutInMillis(), TimeUnit.MILLISECONDS)
.doOnNext(t -> timeLimiter.onSuccess())
.doOnComplete(timeLimiter::onSuccess)
.doOnError(timeLimiter::onError);
}
@Override
public ObservableSource<T> apply(Observable<T> upstream) {
return upstream
.timeout(getTimeoutInMillis(), TimeUnit.MILLISECONDS)
.doOnNext(t -> timeLimiter.onSuccess())
.doOnComplete(timeLimiter::onSuccess)
.doOnError(timeLimiter::onError);
}
@Override
public SingleSource<T> apply(Single<T> upstream) {
return upstream
.timeout(getTimeoutInMillis(), TimeUnit.MILLISECONDS)
.doOnSuccess(t -> timeLimiter.onSuccess())
.doOnError(timeLimiter::onError);
}
@Override
public CompletableSource apply(Completable upstream) {
return upstream
.timeout(getTimeoutInMillis(), TimeUnit.MILLISECONDS)
.doOnComplete(timeLimiter::onSuccess)
.doOnError(timeLimiter::onError);
}
@Override
public MaybeSource<T> apply(Maybe<T> upstream) {
return upstream
.timeout(getTimeoutInMillis(), TimeUnit.MILLISECONDS)
.doOnSuccess(t -> timeLimiter.onSuccess())
.doOnComplete(timeLimiter::onSuccess)
.doOnError(timeLimiter::onError);
}
private long getTimeoutInMillis() {
return timeLimiter.getTimeLimiterConfig()
.getTimeoutDuration()
.toMillis();
}
}
|
TimeLimiterTransformer
|
java
|
alibaba__nacos
|
client-basic/src/main/java/com/alibaba/nacos/client/auth/impl/NacosAuthLoginConstant.java
|
{
"start": 728,
"end": 1251
}
|
class ____ {
public static final String ACCESSTOKEN = "accessToken";
public static final String TOKENTTL = "tokenTtl";
public static final String TOKENREFRESHWINDOW = "tokenRefreshWindow";
public static final String USERNAME = "username";
public static final String PASSWORD = "password";
public static final String COLON = ":";
public static final String SERVER = "server";
public static final String RELOGINFLAG = "reLoginFlag";
}
|
NacosAuthLoginConstant
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/notfound/OptionalEagerMappedByNotFoundTest.java
|
{
"start": 1868,
"end": 4580
}
|
class ____ {
@AfterEach
public void deleteData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testOneToOneJoinException(SessionFactoryScope scope) {
setupTest( PersonOneToOneJoinException.class, 1L, false, scope );
scope.inTransaction(
session -> {
Person pCheck = session.find( PersonOneToOneJoinException.class, 1L );
checkResult( pCheck );
}
);
}
@Test
public void testOneToOneJoinIgnore(SessionFactoryScope scope) {
setupTest( PersonOneToOneJoinIgnore.class, 1L, false, scope );
scope.inTransaction(
session -> {
Person pCheck = session.find( PersonOneToOneJoinIgnore.class, 1L );
checkResult( pCheck );
}
);
}
@Test
public void testOneToOneSelectException(SessionFactoryScope scope) {
setupTest( PersonOneToOneSelectException.class, 1L, false, scope );
scope.inTransaction(
session -> {
Person pCheck = session.find( PersonOneToOneSelectException.class, 1L );
checkResult( pCheck );
}
);
}
@Test
public void testOneToOneSelectIgnore(SessionFactoryScope scope) {
setupTest( PersonOneToOneSelectIgnore.class, 1L, false, scope );
scope.inTransaction(
session -> {
Person pCheck = session.find( PersonOneToOneSelectIgnore.class, 1L );
checkResult( pCheck );
}
);
}
private <T extends Person> void setupTest(Class<T> clazz, long id, boolean isMapsId, SessionFactoryScope scope) {
persistData( clazz, id, isMapsId, scope );
scope.inTransaction(
session -> {
Person p = session.find( clazz, id );
assertEquals( "New York", p.getEmployment().getName() );
}
);
scope.inTransaction(
session ->
session.createNativeQuery( "delete from Employment where id = " + id )
.executeUpdate()
);
}
private <T extends Person> void persistData(Class<T> clazz, long id, boolean isMapsId, SessionFactoryScope scope) {
final Person person;
try {
person = clazz.newInstance();
}
catch (Exception ex) {
throw new RuntimeException( ex );
}
scope.inTransaction(
session -> {
Employment employment = new Employment();
employment.setId( id );
employment.setName( "New York" );
if ( !isMapsId ) {
person.setId( id );
}
person.setName( "John Doe" );
person.setEmployment( employment );
employment.setPerson( person );
session.persist( person );
}
);
}
private void checkResult(Person person) {
assertNotNull( person );
assertNotNull( person.getId() );
assertNull( person.getEmployment() );
}
@Entity(name = "Person")
@Inheritance(strategy = InheritanceType.JOINED)
public abstract static
|
OptionalEagerMappedByNotFoundTest
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/validators/JavadocWording.java
|
{
"start": 966,
"end": 55554
}
|
class ____ {
public static int lineNumber(CharSequence s, int index) {
int cnt = 1;
for (int i = 0; i < index; i++) {
if (s.charAt(i) == '\n') {
cnt++;
}
}
return cnt;
}
@Test
public void maybeDocRefersToMaybeTypes() throws Exception {
List<RxMethod> list = BaseTypeParser.parse(TestHelper.findSource("Maybe"), "Maybe");
assertFalse(list.isEmpty());
StringBuilder e = new StringBuilder();
for (RxMethod m : list) {
int jdx;
if (m.javadoc != null) {
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("onNext", jdx);
if (idx >= 0) {
if (!m.signature.contains("Publisher")
&& !m.signature.contains("Flowable")
&& !m.signature.contains("Observable")
&& !m.signature.contains("ObservableSource")) {
e.append("java.lang.RuntimeException: Maybe doc mentions onNext but no Flowable/Observable in signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("Subscriber", jdx);
if (idx >= 0) {
if (!m.signature.contains("Publisher")
&& !m.signature.contains("Flowable")
&& !m.signature.contains("TestSubscriber")
) {
e.append("java.lang.RuntimeException: Maybe doc mentions Subscriber but not using Flowable\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf(" Subscription", jdx);
if (idx >= 0) {
if (!m.signature.contains("Publisher")
&& !m.signature.contains("Flowable")
) {
e.append("java.lang.RuntimeException: Maybe doc mentions Subscription but not using Flowable\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("Observer", jdx);
if (idx >= 0) {
if (!m.signature.contains("ObservableSource")
&& !m.signature.contains("Observable")
&& !m.signature.contains("TestObserver")) {
if (idx < 5 || !m.javadoc.substring(idx - 5, idx + 8).equals("MaybeObserver")) {
e.append("java.lang.RuntimeException: Maybe doc mentions Observer but not using Observable\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("Publisher", jdx);
if (idx >= 0) {
if (!m.signature.contains("Publisher")) {
if (idx == 0 || !m.javadoc.substring(idx - 1, idx + 9).equals("(Publisher")) {
e.append("java.lang.RuntimeException: Maybe doc mentions Publisher but not in the signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("Flowable", jdx);
if (idx >= 0) {
if (!m.signature.contains("Flowable")) {
Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Flowable");
if (!p.matcher(m.javadoc).find()) {
e.append("java.lang.RuntimeException: Maybe doc mentions Flowable but not in the signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("Single", jdx);
if (idx >= 0 && m.javadoc.indexOf("Single#", jdx) != idx) {
int j = m.javadoc.indexOf("#toSingle", jdx);
int k = m.javadoc.indexOf("{@code Single", jdx);
if (!m.signature.contains("Single") && (j + 3 != idx && k + 7 != idx)) {
e.append("java.lang.RuntimeException: Maybe doc mentions Single but not in the signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("SingleSource", jdx);
if (idx >= 0) {
if (!m.signature.contains("SingleSource")) {
e.append("java.lang.RuntimeException: Maybe doc mentions SingleSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("Observable", jdx);
if (idx >= 0) {
if (!m.signature.contains("Observable")) {
Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Observable");
if (!p.matcher(m.javadoc).find()) {
e.append("java.lang.RuntimeException: Maybe doc mentions Observable but not in the signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
}
jdx = idx + 6;
} else {
break;
}
}
jdx = 0;
for (;;) {
int idx = m.javadoc.indexOf("ObservableSource", jdx);
if (idx >= 0) {
if (!m.signature.contains("ObservableSource")) {
e.append("java.lang.RuntimeException: Maybe doc mentions ObservableSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
.append("Maybe.method(Maybe.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
}
jdx = idx + 6;
} else {
break;
}
}
checkAtReturnAndSignatureMatch("Maybe", m, e, "Flowable", "Observable", "Maybe", "Single", "Completable", "Disposable", "Iterable", "Stream", "Future", "CompletionStage");
aOrAn(e, m, "Maybe");
missingClosingDD(e, m, "Maybe", "io.reactivex.rxjava3.core");
backpressureMentionedWithoutAnnotation(e, m, "Maybe");
}
}
if (e.length() != 0) {
System.out.println(e);
fail(e.toString());
}
}
/**
 * Verifies that the javadoc of each {@code Flowable} operator only mentions
 * reactive types/protocol methods that actually appear in the operator's
 * signature (e.g. a doc that talks about {@code Observer} on a method with no
 * Observable in its signature is flagged). Each mismatch is appended to
 * {@code e} as a synthetic stack-trace entry pointing at the offending
 * javadoc line so IDEs can navigate to it; the test fails if any were found.
 */
@Test
public void flowableDocRefersToFlowableTypes() throws Exception {
    List<RxMethod> list = BaseTypeParser.parse(TestHelper.findSource("Flowable"), "Flowable");
    assertFalse(list.isEmpty());
    StringBuilder e = new StringBuilder();
    for (RxMethod m : list) {
        int jdx;
        if (m.javadoc != null) {
            // "onSuccess" only exists on the Maybe/Single protocols
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("onSuccess", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Maybe")
                            && !m.signature.contains("MaybeSource")
                            && !m.signature.contains("Single")
                            && !m.signature.contains("SingleSource")) {
                        e.append("java.lang.RuntimeException: Flowable doc mentions onSuccess\r\n at io.reactivex.rxjava3.core.")
                        .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            // " Observer" implies Observable-based APIs
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Observer", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")
                            && !m.signature.contains("Observable")) {
                        e.append("java.lang.RuntimeException: Flowable doc mentions Observer but not using Observable\r\n at io.reactivex.rxjava3.core.")
                        .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" SingleObserver", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("SingleSource")
                            && !m.signature.contains("Single")) {
                        e.append("java.lang.RuntimeException: Flowable doc mentions SingleObserver but not using Single\r\n at io.reactivex.rxjava3.core.")
                        .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" MaybeObserver", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("MaybeSource")
                            && !m.signature.contains("Maybe")) {
                        e.append("java.lang.RuntimeException: Flowable doc mentions MaybeObserver but not using Maybe\r\n at io.reactivex.rxjava3.core.")
                        .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Disposable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Observable")
                            && !m.signature.contains("ObservableSource")
                            && !m.signature.contains("Single")
                            && !m.signature.contains("SingleSource")
                            && !m.signature.contains("Completable")
                            && !m.signature.contains("CompletableSource")
                            && !m.signature.contains("Maybe")
                            && !m.signature.contains("MaybeSource")
                            && !m.signature.contains("Disposable")
                            && !m.signature.contains("void subscribe")
                    ) {
                        // Bug fix: evaluate the bounds guard BEFORE taking the
                        // subSequence. The original computed
                        // javadoc.subSequence(idx - 6, idx + 11) first, which throws
                        // StringIndexOutOfBoundsException whenever idx < 6, defeating
                        // the short-circuit guard entirely.
                        // "{@link Disposable" mentions are allowed.
                        if (idx < 6 || !m.javadoc.subSequence(idx - 6, idx + 11).equals("{@link Disposable")) {
                            e.append("java.lang.RuntimeException: Flowable doc mentions Disposable but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                            .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Observable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Observable")) {
                        e.append("java.lang.RuntimeException: Flowable doc mentions Observable but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("ObservableSource", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")) {
                        e.append("java.lang.RuntimeException: Flowable doc mentions ObservableSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Flowable.method(Flowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            // cross-cutting checks shared by all base-type doc tests
            checkAtReturnAndSignatureMatch("Flowable", m, e, "Flowable", "Observable", "Maybe", "Single", "Completable", "ConnectableFlowable", "ParallelFlowable", "Disposable", "Iterable", "Stream", "Future", "CompletionStage");
            aOrAn(e, m, "Flowable");
            missingClosingDD(e, m, "Flowable", "io.reactivex.rxjava3.core");
            backpressureMentionedWithoutAnnotation(e, m, "Flowable");
        }
    }
    if (e.length() != 0) {
        System.out.println(e);
        fail(e.toString());
    }
}
/**
 * Verifies that the javadoc of each {@code ParallelFlowable} operator only
 * mentions reactive types/protocol methods that actually appear in the
 * operator's signature. Mismatches are appended to {@code e} as synthetic
 * stack-trace entries pointing at the offending javadoc line.
 * <p>
 * Bug fixes versus the original: (1) the diagnostic messages and stack frames
 * were copy-pasted from the Flowable test and said
 * "Flowable doc mentions …" / {@code io.reactivex.rxjava3.core.Flowable.method(Flowable.java:…)},
 * so failures pointed at the wrong class and file — they now name
 * ParallelFlowable in {@code io.reactivex.rxjava3.parallel}; (2) the
 * Disposable check evaluated {@code subSequence(idx - 6, …)} before the
 * {@code idx < 6} guard, throwing StringIndexOutOfBoundsException for early
 * matches.
 */
@Test
public void parallelFlowableDocRefersToCorrectTypes() throws Exception {
    List<RxMethod> list = BaseTypeParser.parse(TestHelper.findSource("ParallelFlowable", "io.reactivex.rxjava3.parallel"), "ParallelFlowable");
    assertFalse(list.isEmpty());
    StringBuilder e = new StringBuilder();
    for (RxMethod m : list) {
        int jdx;
        if (m.javadoc != null) {
            // "onSuccess" only exists on the Maybe/Single protocols
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("onSuccess", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Maybe")
                            && !m.signature.contains("MaybeSource")
                            && !m.signature.contains("Single")
                            && !m.signature.contains("SingleSource")) {
                        e.append("java.lang.RuntimeException: ParallelFlowable doc mentions onSuccess\r\n at io.reactivex.rxjava3.parallel.")
                        .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Observer", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")
                            && !m.signature.contains("Observable")) {
                        e.append("java.lang.RuntimeException: ParallelFlowable doc mentions Observer but not using Observable\r\n at io.reactivex.rxjava3.parallel.")
                        .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" SingleObserver", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("SingleSource")
                            && !m.signature.contains("Single")) {
                        e.append("java.lang.RuntimeException: ParallelFlowable doc mentions SingleObserver but not using Single\r\n at io.reactivex.rxjava3.parallel.")
                        .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" MaybeObserver", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("MaybeSource")
                            && !m.signature.contains("Maybe")) {
                        e.append("java.lang.RuntimeException: ParallelFlowable doc mentions MaybeObserver but not using Maybe\r\n at io.reactivex.rxjava3.parallel.")
                        .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Disposable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Observable")
                            && !m.signature.contains("ObservableSource")
                            && !m.signature.contains("Single")
                            && !m.signature.contains("SingleSource")
                            && !m.signature.contains("Completable")
                            && !m.signature.contains("CompletableSource")
                            && !m.signature.contains("Maybe")
                            && !m.signature.contains("MaybeSource")
                            && !m.signature.contains("Disposable")
                    ) {
                        // bounds guard must run before subSequence; "{@link Disposable" is allowed
                        if (idx < 6 || !m.javadoc.subSequence(idx - 6, idx + 11).equals("{@link Disposable")) {
                            e.append("java.lang.RuntimeException: ParallelFlowable doc mentions Disposable but not using ParallelFlowable\r\n at io.reactivex.rxjava3.parallel.")
                            .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Observable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Observable")) {
                        e.append("java.lang.RuntimeException: ParallelFlowable doc mentions Observable but not in the signature\r\n at io.reactivex.rxjava3.parallel.")
                        .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("ObservableSource", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")) {
                        e.append("java.lang.RuntimeException: ParallelFlowable doc mentions ObservableSource but not in the signature\r\n at io.reactivex.rxjava3.parallel.")
                        .append("ParallelFlowable.method(ParallelFlowable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            checkAtReturnAndSignatureMatch("ParallelFlowable", m, e, "Flowable", "Observable", "Maybe", "Single", "Completable", "ConnectableFlowable", "ParallelFlowable", "Disposable", "Iterable", "Stream", "Future", "CompletionStage");
            aOrAn(e, m, "ParallelFlowable");
            missingClosingDD(e, m, "ParallelFlowable", "io.reactivex.rxjava3.parallel");
            backpressureMentionedWithoutAnnotation(e, m, "ParallelFlowable");
        }
    }
    if (e.length() != 0) {
        System.out.println(e);
        fail(e.toString());
    }
}
// Verifies that the javadoc of each Observable operator mentions
// Flowable-protocol types (Flowable, Publisher, Subscriber, Subscription)
// only when such a type actually appears in the operator's signature.
// Mismatches are collected into `e` as synthetic stack-trace entries
// pointing at the offending javadoc line; the test fails if any were found.
@Test
public void observableDocRefersToObservableTypes() throws Exception {
    List<RxMethod> list = BaseTypeParser.parse(TestHelper.findSource("Observable"), "Observable");
    assertFalse(list.isEmpty());
    StringBuilder e = new StringBuilder();
    for (RxMethod m : list) {
        int jdx;
        if (m.javadoc != null) {
            // "onSuccess" only exists on the Maybe/Single protocols
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("onSuccess", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Maybe")
                            && !m.signature.contains("MaybeSource")
                            && !m.signature.contains("Single")
                            && !m.signature.contains("SingleSource")) {
                        e.append("java.lang.RuntimeException: Observable doc mentions onSuccess\r\n at io.reactivex.rxjava3.core.")
                        .append("Observable.method(Observable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            // " Subscription" belongs to the Reactive Streams (Flowable) protocol
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Subscription", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Flowable")
                            && !m.signature.contains("Publisher")
                    ) {
                        e.append("java.lang.RuntimeException: Observable doc mentions Subscription but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                        .append("Observable.method(Observable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Flowable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Flowable")) {
                        // "{@link Flowable" cross-references are allowed
                        if (idx < 6 || !m.javadoc.substring(idx - 6, idx + 8).equals("@link Flowable")) {
                            e.append("java.lang.RuntimeException: Observable doc mentions Flowable but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Observable.method(Observable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Publisher", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")) {
                        e.append("java.lang.RuntimeException: Observable doc mentions Publisher but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Observable.method(Observable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Subscriber", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")
                            && !m.signature.contains("Flowable")) {
                        e.append("java.lang.RuntimeException: Observable doc mentions Subscriber but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                        .append("Observable.method(Observable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            // cross-cutting checks shared by all base-type doc tests
            checkAtReturnAndSignatureMatch("Observable", m, e, "Flowable", "Observable", "Maybe", "Single", "Completable", "ConnectableObservable", "Disposable", "Iterable", "Stream", "Future", "CompletionStage");
            aOrAn(e, m, "Observable");
            missingClosingDD(e, m, "Observable", "io.reactivex.rxjava3.core");
            backpressureMentionedWithoutAnnotation(e, m, "Observable");
        }
    }
    if (e.length() != 0) {
        System.out.println(e);
        fail(e.toString());
    }
}
/**
 * Verifies that the javadoc of each {@code Single} operator only mentions
 * reactive types/protocol methods that actually appear in the operator's
 * signature. Mismatches are appended to {@code e} as synthetic stack-trace
 * entries pointing at the offending javadoc line.
 * <p>
 * Bug fix: the {@code " MaybeSource"} check reported
 * "Single doc mentions SingleSource" with a stack frame pointing at
 * {@code Maybe.java} (copy-paste from other checks); it now reports
 * MaybeSource and points at {@code Single.java}.
 */
@Test
public void singleDocRefersToSingleTypes() throws Exception {
    List<RxMethod> list = BaseTypeParser.parse(TestHelper.findSource("Single"), "Single");
    assertFalse(list.isEmpty());
    StringBuilder e = new StringBuilder();
    for (RxMethod m : list) {
        int jdx;
        if (m.javadoc != null) {
            // "onNext" only exists on the Flowable/Observable protocols
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("onNext", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")
                            && !m.signature.contains("Flowable")
                            && !m.signature.contains("Observable")
                            && !m.signature.contains("ObservableSource")) {
                        e.append("java.lang.RuntimeException: Single doc mentions onNext but no Flowable/Observable in signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Subscriber", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")
                            && !m.signature.contains("Flowable")
                            && !m.signature.contains("TestSubscriber")) {
                        e.append("java.lang.RuntimeException: Single doc mentions Subscriber but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Subscription", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Flowable")
                            && !m.signature.contains("Publisher")
                    ) {
                        e.append("java.lang.RuntimeException: Single doc mentions Subscription but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Observer", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")
                            && !m.signature.contains("Observable")
                            && !m.signature.contains("TestObserver")) {
                        // "SingleObserver" mentions are Single's own protocol and allowed
                        if (idx < 6 || !m.javadoc.substring(idx - 6, idx + 8).equals("SingleObserver")) {
                            e.append("java.lang.RuntimeException: Single doc mentions Observer but not using Observable\r\n at io.reactivex.rxjava3.core.")
                            .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Publisher", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")) {
                        // "(Publisher" — a parenthesized mention — is allowed
                        if (idx == 0 || !m.javadoc.substring(idx - 1, idx + 9).equals("(Publisher")) {
                            e.append("java.lang.RuntimeException: Single doc mentions Publisher but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Flowable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Flowable")) {
                        e.append("java.lang.RuntimeException: Single doc mentions Flowable but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Maybe", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Maybe")) {
                        e.append("java.lang.RuntimeException: Single doc mentions Maybe but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" MaybeSource", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("MaybeSource")) {
                        // Bug fix: message/stack frame previously said "SingleSource"
                        // and "Maybe.method(Maybe.java:" — report the actual type and file.
                        e.append("java.lang.RuntimeException: Single doc mentions MaybeSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Observable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Observable")) {
                        e.append("java.lang.RuntimeException: Single doc mentions Observable but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" ObservableSource", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")) {
                        e.append("java.lang.RuntimeException: Single doc mentions ObservableSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Single.method(Single.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            checkAtReturnAndSignatureMatch("Single", m, e, "Flowable", "Observable", "Maybe", "Single", "Completable", "Disposable", "Iterable", "Stream", "Future", "CompletionStage");
            aOrAn(e, m, "Single");
            missingClosingDD(e, m, "Single", "io.reactivex.rxjava3.core");
            backpressureMentionedWithoutAnnotation(e, m, "Single");
        }
    }
    if (e.length() != 0) {
        System.out.println(e);
        fail(e.toString());
    }
}
// Verifies that the javadoc of each Completable operator only mentions
// reactive types/protocol methods that actually appear in the operator's
// signature. Occurrences reachable through an "@see #...Type" cross-reference
// are tolerated for Flowable/Single/SingleSource/Observable/ObservableSource.
// Mismatches are collected into `e` as synthetic stack-trace entries pointing
// at the offending javadoc line; the test fails if any were found.
@Test
public void completableDocRefersToCompletableTypes() throws Exception {
    List<RxMethod> list = BaseTypeParser.parse(TestHelper.findSource("Completable"), "Completable");
    assertFalse(list.isEmpty());
    StringBuilder e = new StringBuilder();
    for (RxMethod m : list) {
        int jdx;
        if (m.javadoc != null) {
            // "onNext" only exists on the Flowable/Observable protocols
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("onNext", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")
                            && !m.signature.contains("Flowable")
                            && !m.signature.contains("Observable")
                            && !m.signature.contains("ObservableSource")) {
                        e.append("java.lang.RuntimeException: Completable doc mentions onNext but no Flowable/Observable in signature\r\n at io.reactivex.rxjava3.core.")
                        .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Subscriber", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")
                            && !m.signature.contains("Flowable")
                            && !m.signature.contains("TestSubscriber")) {
                        e.append("java.lang.RuntimeException: Completable doc mentions Subscriber but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                        .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Subscription", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Flowable")
                            && !m.signature.contains("Publisher")
                    ) {
                        e.append("java.lang.RuntimeException: Completable doc mentions Subscription but not using Flowable\r\n at io.reactivex.rxjava3.core.")
                        .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Observer", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")
                            && !m.signature.contains("Observable")
                            && !m.signature.contains("TestObserver")) {
                        // "CompletableObserver" mentions are Completable's own protocol and allowed
                        if (idx < 11 || !m.javadoc.substring(idx - 11, idx + 8).equals("CompletableObserver")) {
                            e.append("java.lang.RuntimeException: Completable doc mentions Observer but not using Observable\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Publisher", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Publisher")) {
                        // "(Publisher" — a parenthesized mention — is allowed
                        if (idx == 0 || !m.javadoc.substring(idx - 1, idx + 9).equals("(Publisher")) {
                            e.append("java.lang.RuntimeException: Completable doc mentions Publisher but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Flowable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Flowable")) {
                        // tolerated when reachable via an "@see #...Flowable" cross-reference
                        Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Flowable");
                        if (!p.matcher(m.javadoc).find()) {
                            e.append("java.lang.RuntimeException: Completable doc mentions Flowable but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("Single", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Single")) {
                        Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Single");
                        if (!p.matcher(m.javadoc).find()) {
                            e.append("java.lang.RuntimeException: Completable doc mentions Single but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("SingleSource", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("SingleSource")) {
                        Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*SingleSource");
                        if (!p.matcher(m.javadoc).find()) {
                            e.append("java.lang.RuntimeException: Completable doc mentions SingleSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf(" Observable", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("Observable")) {
                        Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Observable");
                        if (!p.matcher(m.javadoc).find()) {
                            e.append("java.lang.RuntimeException: Completable doc mentions Observable but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            jdx = 0;
            for (;;) {
                int idx = m.javadoc.indexOf("ObservableSource", jdx);
                if (idx >= 0) {
                    if (!m.signature.contains("ObservableSource")) {
                        Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*ObservableSource");
                        if (!p.matcher(m.javadoc).find()) {
                            e.append("java.lang.RuntimeException: Completable doc mentions ObservableSource but not in the signature\r\n at io.reactivex.rxjava3.core.")
                            .append("Completable.method(Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
                        }
                    }
                    jdx = idx + 6;
                } else {
                    break;
                }
            }
            // cross-cutting checks shared by all base-type doc tests
            checkAtReturnAndSignatureMatch("Completable", m, e, "Flowable", "Observable", "Maybe", "Single", "Completable", "Disposable", "Iterable", "Stream", "Future", "CompletionStage");
            aOrAn(e, m, "Completable");
            missingClosingDD(e, m, "Completable", "io.reactivex.rxjava3.core");
            backpressureMentionedWithoutAnnotation(e, m, "Completable");
        }
    }
    if (e.length() != 0) {
        System.out.println(e);
        fail(e.toString());
    }
}
/**
 * Flags methods whose declared return type disagrees with the type named in
 * the javadoc's {@code @return} clause: if the signature returns
 * {@code returnType} but the javadoc says e.g. "@return a {@code OtherType}"
 * for any other entry of {@code types}, a synthetic stack-trace entry is
 * appended to {@code e}.
 */
static void checkAtReturnAndSignatureMatch(String className, RxMethod m, StringBuilder e, String... types) {
    for (String returnType : types) {
        // Completable is not generic, so it is matched without a type-argument list.
        String signaturePattern = returnType.contains("Completable")
                ? "(?s).*?\\s" + returnType + "\\s+\\w+\\(.*"
                : "(?s).*?\\s" + returnType + "\\<.*?\\>\\s+\\w+\\(.*";
        if (!m.signature.matches(signaturePattern)) {
            continue;
        }
        for (String prefix : AT_RETURN_WORDS) {
            for (String documented : types) {
                if (returnType.equals(documented)) {
                    continue;
                }
                int at = m.javadoc.indexOf(prefix + "{@code " + documented);
                if (at >= 0) {
                    e.append("Returns ").append(returnType)
                    .append(" but docs return ")
                    .append(documented)
                    .append("\r\n at io.reactivex.rxjava3.core.")
                    .append(className)
                    .append(".method(")
                    .append(className)
                    .append(".java:")
                    .append(m.javadocLine + lineNumber(m.javadoc, at) - 1)
                    .append(")\r\n\r\n");
                }
            }
        }
    }
}
/**
 * Runs the a/an article-typo checks for the type names that commonly appear
 * in operator javadocs (" an" is wrong before consonant-sounding names,
 * " a" is wrong before vowel-sounding names).
 */
static void aOrAn(StringBuilder e, RxMethod m, String baseTypeName) {
    aOrAn(e, m, " an", "Single", baseTypeName);
    aOrAn(e, m, " an", "Maybe", baseTypeName);
    aOrAn(e, m, " a", "Observer", baseTypeName);
    // Bug fix: "Observable" was listed twice, so every "a Observable" typo
    // was reported twice; the duplicate call has been removed.
    aOrAn(e, m, " a", "Observable", baseTypeName);
    aOrAn(e, m, " an", "Publisher", baseTypeName);
    aOrAn(e, m, " an", "Subscriber", baseTypeName);
    aOrAn(e, m, " an", "Flowable", baseTypeName);
}
/**
 * Scans the method's javadoc for a wrong article before {@code word}
 * (e.g. " an Single" or " a Observable"), both in plain text and when the
 * word is wrapped in a {@code @link}, {@code @linkplain} or {@code @code}
 * inline tag, appending a synthetic stack-trace entry for every hit.
 * A final pass re-checks the text with whitespace normalized and inline
 * tags stripped, to catch typos that straddle line breaks.
 */
static void aOrAn(StringBuilder e, RxMethod m, String wrongPre, String word, String baseTypeName) {
    // plain-text occurrences: "<wrongPre> <word>"
    int jdx = 0;
    int idx;
    for (;;) {
        idx = m.javadoc.indexOf(wrongPre + " " + word, jdx);
        if (idx >= 0) {
            e.append("java.lang.RuntimeException: a/an typo ")
            .append(word)
            .append("\r\n at io.reactivex.rxjava3.core.")
            .append(baseTypeName)
            .append(".method(")
            .append(baseTypeName)
            .append(".java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
            jdx = idx + 6;
        } else {
            break;
        }
    }
    // occurrences inside a {@link ...} tag
    jdx = 0;
    for (;;) {
        idx = m.javadoc.indexOf(wrongPre + " {@link " + word, jdx);
        if (idx >= 0) {
            e.append("java.lang.RuntimeException: a/an typo ")
            .append(word)
            .append("\r\n at io.reactivex.rxjava3.core.")
            .append(baseTypeName)
            .append(".method(")
            .append(baseTypeName)
            .append(".java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
            jdx = idx + 6;
        } else {
            break;
        }
    }
    // occurrences inside a {@linkplain ...} tag
    jdx = 0;
    for (;;) {
        idx = m.javadoc.indexOf(wrongPre + " {@linkplain " + word, jdx);
        if (idx >= 0) {
            e.append("java.lang.RuntimeException: a/an typo ")
            .append(word)
            .append("\r\n at io.reactivex.rxjava3.core.")
            .append(baseTypeName)
            .append(".method(")
            .append(baseTypeName)
            .append(".java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
            jdx = idx + 6;
        } else {
            break;
        }
    }
    // occurrences inside a {@code ...} tag
    jdx = 0;
    for (;;) {
        idx = m.javadoc.indexOf(wrongPre + " {@code " + word, jdx);
        if (idx >= 0) {
            e.append("java.lang.RuntimeException: a/an typo ")
            .append(word)
            .append("\r\n at io.reactivex.rxjava3.core.")
            .append(baseTypeName)
            .append(".method(")
            .append(baseTypeName)
            .append(".java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
            jdx = idx + 6;
        } else {
            break;
        }
    }
    // remove linebreaks and multi-spaces
    String javadoc2 = m.javadoc.replace("\n", " ").replace("\r", " ")
            .replace(" * ", " ")
            .replaceAll("\\s+", " ");
    // strip {@xxx } tags
    // NOTE(review): this assumes every "{@" tag contains a space and a later
    // closing '}'; a malformed tag would make nn or mm equal -1 and the
    // substring arithmetic below would throw StringIndexOutOfBoundsException
    // — confirm the parsed javadoc is always well-formed.
    int kk = 0;
    for (;;) {
        int jj = javadoc2.indexOf("{@", kk);
        if (jj < 0) {
            break;
        }
        int nn = javadoc2.indexOf(" ", jj + 2);
        int mm = javadoc2.indexOf("}", jj + 2);
        javadoc2 = javadoc2.substring(0, jj) + javadoc2.substring(nn + 1, mm) + javadoc2.substring(mm + 1);
        kk = mm + 1;
    }
    // re-scan the normalized text; the exact source line is unknown here,
    // so the report points at the start of the javadoc block instead.
    jdx = 0;
    for (;;) {
        idx = javadoc2.indexOf(wrongPre + " " + word, jdx);
        if (idx >= 0) {
            e.append("java.lang.RuntimeException: a/an typo ")
            .append(word)
            .append("\r\n at io.reactivex.rxjava3.core.")
            .append(baseTypeName)
            .append(".method(")
            .append(baseTypeName)
            .append(".java:").append(m.javadocLine).append(")\r\n\r\n");
            jdx = idx + wrongPre.length() + 1 + word.length();
        } else {
            break;
        }
    }
}
/**
 * Verifies that every {@code <dd>} in the javadoc is closed by a matching
 * {@code </dd>} before the next {@code <dd>} opens; the first unbalanced pair
 * is reported as a synthetic stack-trace entry and scanning stops.
 */
static void missingClosingDD(StringBuilder e, RxMethod m, String baseTypeName, String packageName) {
    int jdx = 0;
    for (;;) {
        int idx1 = m.javadoc.indexOf("<dd>", jdx);
        int idx2 = m.javadoc.indexOf("</dd>", jdx);
        if (idx1 < 0 && idx2 < 0) {
            break;
        }
        int idx3 = m.javadoc.indexOf("<dd>", idx1 + 4);
        // Bug fix: use >= 0 rather than > 0 — indexOf legitimately returns 0
        // for a match at the very start of the javadoc, which the original
        // mis-reported as unbalanced. The redundant "idx3 > 0" term was also
        // removed (idx3 is either -1 or at least idx1 + 4 here).
        if (idx1 >= 0 && idx2 >= 0 && (idx3 < 0 || idx2 < idx3)) {
            jdx = idx2 + 5;
        } else {
            // Bug fix: when only a stray </dd> remains, idx1 is -1; point the
            // report at the </dd> instead of passing -1 to lineNumber().
            int at = idx1 >= 0 ? idx1 : idx2;
            e.append("java.lang.RuntimeException: unbalanced <dd></dd> ")
            .append("\r\n at ")
            .append(packageName)
            .append(".")
            .append(baseTypeName)
            .append(".method(")
            .append(baseTypeName)
            .append(".java:").append(m.javadocLine + lineNumber(m.javadoc, at) - 1).append(")\r\n\r\n");
            break;
        }
    }
}
/**
 * Reports a method whose javadoc carries a backpressure section
 * (backpressureDocLine set) while no backpressure kind was parsed from the
 * method itself (backpressureKind is null), appending a synthetic
 * stack-trace entry pointing at the documented backpressure line.
 */
static void backpressureMentionedWithoutAnnotation(StringBuilder e, RxMethod m, String baseTypeName) {
    boolean documented = m.backpressureDocLine > 0;
    boolean annotated = m.backpressureKind != null;
    if (!documented || annotated) {
        return;
    }
    e.append("java.lang.RuntimeException: backpressure documented but not annotated ")
    .append("\r\n at io.reactivex.rxjava3.core.")
    .append(baseTypeName)
    .append(".method(")
    .append(baseTypeName)
    .append(".java:").append(m.backpressureDocLine).append(")\r\n\r\n");
}
// @return-clause prefixes scanned by checkAtReturnAndSignatureMatch when
// searching the javadoc for "@return ... {@code Type}" mismatches.
static final String[] AT_RETURN_WORDS = { "@return a ", "@return an ", "@return the new ", "@return a new " };
}
|
JavadocWording
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/SurrogateFunction.java
|
{
"start": 332,
"end": 398
}
|
interface ____ {
ScalarFunction substitute();
}
|
SurrogateFunction
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-resource-server/src/test/java/org/springframework/security/oauth2/server/resource/web/access/server/BearerTokenServerAccessDeniedHandlerTests.java
|
{
"start": 4134,
"end": 4640
}
|
class ____
extends AbstractOAuth2TokenAuthenticationToken<TestingOAuth2TokenAuthenticationToken.TestingOAuth2Token> {
private Map<String, Object> attributes;
protected TestingOAuth2TokenAuthenticationToken(Map<String, Object> attributes) {
super(new TestingOAuth2TokenAuthenticationToken.TestingOAuth2Token("token"));
this.attributes = attributes;
}
@Override
public Map<String, Object> getTokenAttributes() {
return this.attributes;
}
static
|
TestingOAuth2TokenAuthenticationToken
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/server/ServerOAuth2AuthorizationCodeAuthenticationTokenConverterTests.java
|
{
"start": 2205,
"end": 6846
}
|
class ____ {
@Mock
private ReactiveClientRegistrationRepository clientRegistrationRepository;
@Mock
private ServerAuthorizationRequestRepository authorizationRequestRepository;
private String clientRegistrationId = "github";
// @formatter:off
private ClientRegistration clientRegistration = ClientRegistration.withRegistrationId(this.clientRegistrationId)
.redirectUri("{baseUrl}/{action}/oauth2/code/{registrationId}")
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.scope("read:user")
.authorizationUri("https://github.com/login/oauth/authorize")
.tokenUri("https://github.com/login/oauth/access_token")
.userInfoUri("https://api.github.com/user")
.userNameAttributeName("id")
.clientName("GitHub")
.clientId("clientId")
.clientSecret("clientSecret")
.build();
// @formatter:on
// @formatter:off
private OAuth2AuthorizationRequest.Builder authorizationRequest = OAuth2AuthorizationRequest.authorizationCode()
.authorizationUri("https://example.com/oauth2/authorize")
.clientId("client-id")
.redirectUri("http://localhost/client-1")
.state("state")
.attributes(Collections.singletonMap(OAuth2ParameterNames.REGISTRATION_ID, this.clientRegistrationId));
// @formatter:on
private final MockServerHttpRequest.BaseBuilder<?> request = MockServerHttpRequest.get("/");
private ServerOAuth2AuthorizationCodeAuthenticationTokenConverter converter;
@BeforeEach
public void setup() {
this.converter = new ServerOAuth2AuthorizationCodeAuthenticationTokenConverter(
this.clientRegistrationRepository);
this.converter.setAuthorizationRequestRepository(this.authorizationRequestRepository);
}
@Test
public void applyWhenAuthorizationRequestEmptyThenOAuth2AuthorizationException() {
given(this.authorizationRequestRepository.removeAuthorizationRequest(any())).willReturn(Mono.empty());
assertThatExceptionOfType(OAuth2AuthorizationException.class).isThrownBy(() -> applyConverter());
}
@Test
public void applyWhenAttributesMissingThenOAuth2AuthorizationException() {
this.authorizationRequest.attributes(Map::clear);
given(this.authorizationRequestRepository.removeAuthorizationRequest(any()))
.willReturn(Mono.just(this.authorizationRequest.build()));
assertThatExceptionOfType(OAuth2AuthorizationException.class).isThrownBy(() -> applyConverter())
.withMessageContaining(
ServerOAuth2AuthorizationCodeAuthenticationTokenConverter.CLIENT_REGISTRATION_NOT_FOUND_ERROR_CODE);
}
@Test
public void applyWhenClientRegistrationMissingThenOAuth2AuthorizationException() {
given(this.authorizationRequestRepository.removeAuthorizationRequest(any()))
.willReturn(Mono.just(this.authorizationRequest.build()));
given(this.clientRegistrationRepository.findByRegistrationId(any())).willReturn(Mono.empty());
assertThatExceptionOfType(OAuth2AuthorizationException.class).isThrownBy(() -> applyConverter())
.withMessageContaining(
ServerOAuth2AuthorizationCodeAuthenticationTokenConverter.CLIENT_REGISTRATION_NOT_FOUND_ERROR_CODE);
}
@Test
public void applyWhenCodeParameterNotFoundThenErrorCode() {
this.request.queryParam(OAuth2ParameterNames.ERROR, "error");
given(this.authorizationRequestRepository.removeAuthorizationRequest(any()))
.willReturn(Mono.just(this.authorizationRequest.build()));
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(this.clientRegistration));
assertThat(applyConverter().getAuthorizationExchange().getAuthorizationResponse().getError().getErrorCode())
.isEqualTo("error");
}
@Test
public void applyWhenCodeParameterFoundThenCode() {
this.request.queryParam(OAuth2ParameterNames.CODE, "code");
given(this.authorizationRequestRepository.removeAuthorizationRequest(any()))
.willReturn(Mono.just(this.authorizationRequest.build()));
given(this.clientRegistrationRepository.findByRegistrationId(any()))
.willReturn(Mono.just(this.clientRegistration));
OAuth2AuthorizationCodeAuthenticationToken result = applyConverter();
OAuth2AuthorizationResponse exchange = result.getAuthorizationExchange().getAuthorizationResponse();
assertThat(exchange.getError()).isNull();
assertThat(exchange.getCode()).isEqualTo("code");
}
private OAuth2AuthorizationCodeAuthenticationToken applyConverter() {
MockServerWebExchange exchange = MockServerWebExchange.from(this.request);
return (OAuth2AuthorizationCodeAuthenticationToken) this.converter.convert(exchange).block();
}
}
|
ServerOAuth2AuthorizationCodeAuthenticationTokenConverterTests
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-proxyexchange-webmvc/src/test/java/org/springframework/cloud/gateway/mvc/config/ProxyExchangeArgumentResolverTest.java
|
{
"start": 3546,
"end": 3836
}
|
class ____ {
private URI home;
public void setHome(URI home) {
this.home = home;
}
@GetMapping("/proxy")
public ResponseEntity<?> proxyFoo(ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/foo").get();
}
}
@RestController
static
|
ProxyController
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/CriteriaIsNullTest.java
|
{
"start": 2832,
"end": 3317
}
|
class ____ {
@Id
private Integer id;
private String name;
@OneToOne
private Account account;
Person() {
}
public Person(Integer id, String name, Account account) {
this.id = id;
this.name = name;
this.account = account;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public Account getAccount() {
return account;
}
}
@Entity(name = "Account")
@Table(name = "ACCOUNT_TABLE")
public static
|
Person
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/json/AbstractJsonReplaceFunction.java
|
{
"start": 728,
"end": 1307
}
|
class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
public AbstractJsonReplaceFunction(TypeConfiguration typeConfiguration) {
super(
"json_replace",
FunctionKind.NORMAL,
new ArgumentTypesValidator(
StandardArgumentsValidators.exactly( 3 ),
FunctionParameterType.IMPLICIT_JSON,
FunctionParameterType.STRING,
FunctionParameterType.ANY
),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON )
),
null
);
}
}
|
AbstractJsonReplaceFunction
|
java
|
netty__netty
|
handler/src/test/java/io/netty/handler/ssl/MockAlternativeKeyProvider.java
|
{
"start": 3437,
"end": 4347
}
|
class ____ implements PrivateKey {
private final PrivateKey delegate;
AlternativePrivateKeyWrapper(PrivateKey delegate) {
this.delegate = delegate;
}
@Override
public String getAlgorithm() {
return delegate.getAlgorithm();
}
@Override
public String getFormat() {
// Alternative key providers typically don't support standard formats
return null;
}
@Override
public byte[] getEncoded() {
// This is the key behavior: alternative key providers return null
// because the private key material is not directly accessible
return null;
}
// Provide access to the real key for signature operations
PrivateKey getDelegate() {
return delegate;
}
}
// Abstract base
|
AlternativePrivateKeyWrapper
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/introspect/BeanDescriptionTest.java
|
{
"start": 530,
"end": 957
}
|
class ____ {
public int x;
}
@Test
public void testClassDesc() throws Exception
{
BeanDescription beanDesc = ObjectMapperTestAccess.beanDescriptionForDeser(MAPPER, DocumentedBean.class);
assertEquals(CLASS_DESC, MAPPER.deserializationConfig().getAnnotationIntrospector()
.findClassDescription(MAPPER.deserializationConfig(), beanDesc.getClassInfo()));
}
}
|
DocumentedBean
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TempDirectoryTests.java
|
{
"start": 52918,
"end": 53271
}
|
class ____ {
@Test
void test(@TempDir Path tempDir1, @TempDir Path tempDir2) {
assertNotSame(tempDir1, tempDir2);
assertThat(tempDir1.getFileName()).asString().startsWith("custom");
assertThat(tempDir2.getFileName()).asString().startsWith("custom");
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
static
|
CustomDefaultFactoryTestCase
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
|
{
"start": 12194,
"end": 12840
}
|
class ____ extends AbstractCounterGroup<Counter> {
GenericGroup(String name, String displayName, Limits limits) {
super(name, displayName, limits);
}
@Override
protected Counter newCounter(String counterName, String displayName,
long value) {
return new Counter(new GenericCounter(counterName, displayName, value));
}
@Override
protected Counter newCounter() {
return new Counter();
}
@Override
public CounterGroupBase<Counter> getUnderlyingGroup() {
return this;
}
}
// Mix the framework group implementation into the Group
|
GenericGroup
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/support/AbstractMessageChannel.java
|
{
"start": 1838,
"end": 4841
}
|
class ____.
* @param logger the logger to use
* @since 5.1
*/
public void setLogger(Log logger) {
this.logger = logger;
}
/**
* Return the currently configured Logger.
* @since 5.1
*/
public Log getLogger() {
return logger;
}
/**
* A message channel uses the bean name primarily for logging purposes.
*/
@Override
public void setBeanName(String name) {
this.beanName = name;
}
/**
* Return the bean name for this message channel.
*/
public String getBeanName() {
return this.beanName;
}
@Override
public void setInterceptors(List<ChannelInterceptor> interceptors) {
Assert.noNullElements(interceptors, "'interceptors' must not contain null elements");
this.interceptors.clear();
this.interceptors.addAll(interceptors);
}
@Override
public void addInterceptor(ChannelInterceptor interceptor) {
Assert.notNull(interceptor, "'interceptor' must not be null");
this.interceptors.add(interceptor);
}
@Override
public void addInterceptor(int index, ChannelInterceptor interceptor) {
Assert.notNull(interceptor, "'interceptor' must not be null");
this.interceptors.add(index, interceptor);
}
@Override
public List<ChannelInterceptor> getInterceptors() {
return Collections.unmodifiableList(this.interceptors);
}
@Override
public boolean removeInterceptor(ChannelInterceptor interceptor) {
return this.interceptors.remove(interceptor);
}
@Override
public ChannelInterceptor removeInterceptor(int index) {
return this.interceptors.remove(index);
}
@Override
public final boolean send(Message<?> message) {
return send(message, INDEFINITE_TIMEOUT);
}
@Override
public final boolean send(Message<?> message, long timeout) {
Assert.notNull(message, "Message must not be null");
Message<?> messageToUse = message;
ChannelInterceptorChain chain = new ChannelInterceptorChain();
boolean sent = false;
try {
messageToUse = chain.applyPreSend(messageToUse, this);
if (messageToUse == null) {
return false;
}
sent = sendInternal(messageToUse, timeout);
chain.applyPostSend(messageToUse, this, sent);
chain.triggerAfterSendCompletion(messageToUse, this, sent, null);
return sent;
}
catch (Exception ex) {
chain.triggerAfterSendCompletion(messageToUse, this, sent, ex);
if (ex instanceof MessagingException messagingException) {
throw messagingException;
}
throw new MessageDeliveryException(messageToUse,"Failed to send message to " + this, ex);
}
catch (Throwable err) {
MessageDeliveryException ex2 =
new MessageDeliveryException(messageToUse, "Failed to send message to " + this, err);
chain.triggerAfterSendCompletion(messageToUse, this, sent, ex2);
throw ex2;
}
}
protected abstract boolean sendInternal(Message<?> message, long timeout);
@Override
public String toString() {
return getClass().getSimpleName() + "[" + this.beanName + "]";
}
/**
* Assists with the invocation of the configured channel interceptors.
*/
protected
|
name
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/cdi/TracerProducer.java
|
{
"start": 953,
"end": 3980
}
|
class ____ {
@Produces
@ApplicationScoped
@DefaultBean
public Tracer getTracer() {
return GlobalOpenTelemetry.getTracer(INSTRUMENTATION_NAME);
}
@Produces
@RequestScoped
@DefaultBean
public Span getSpan() {
return new Span() {
@Override
public <T> Span setAttribute(final AttributeKey<T> key, final T value) {
return Span.current().setAttribute(key, value);
}
@Override
public Span addEvent(final String name, final Attributes attributes) {
return Span.current().addEvent(name, attributes);
}
@Override
public Span addEvent(
final String name,
final Attributes attributes,
final long timestamp,
final TimeUnit unit) {
return Span.current().addEvent(name, attributes, timestamp, unit);
}
@Override
public Span setStatus(final StatusCode statusCode, final String description) {
return Span.current().setStatus(statusCode, description);
}
@Override
public Span recordException(final Throwable exception, final Attributes additionalAttributes) {
return Span.current().recordException(exception, additionalAttributes);
}
@Override
public Span updateName(final String name) {
return Span.current().updateName(name);
}
@Override
public void end() {
Span.current().end();
}
@Override
public void end(final long timestamp, final TimeUnit unit) {
Span.current().end(timestamp, unit);
}
@Override
public SpanContext getSpanContext() {
return Span.current().getSpanContext();
}
@Override
public boolean isRecording() {
return Span.current().isRecording();
}
};
}
@Produces
@RequestScoped
@DefaultBean
public Baggage getBaggage() {
return new Baggage() {
@Override
public int size() {
return Baggage.current().size();
}
@Override
public void forEach(final BiConsumer<? super String, ? super BaggageEntry> consumer) {
Baggage.current().forEach(consumer);
}
@Override
public Map<String, BaggageEntry> asMap() {
return Baggage.current().asMap();
}
@Override
public String getEntryValue(final String entryKey) {
return Baggage.current().getEntryValue(entryKey);
}
@Override
public BaggageBuilder toBuilder() {
return Baggage.current().toBuilder();
}
};
}
}
|
TracerProducer
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
|
{
"start": 7591,
"end": 7720
}
|
class ____<T> {
public void method(T generic, List<T> list) {}
}
/** Test function. */
public static
|
ClassBase2
|
java
|
quarkusio__quarkus
|
independent-projects/tools/devtools-common/src/main/java/io/quarkus/maven/utilities/MojoUtils.java
|
{
"start": 1065,
"end": 8777
}
|
class ____ {
public static final String JAVA_FILE_EXTENSION = ".java";
public static final String KOTLIN_FILE_EXTENSION = ".kt";
public static final String SCALA_FILE_EXTENSION = ".scala";
public static final String JAVA_EXTENSION_NAME = "java";
public static final String KOTLIN_EXTENSION_NAME = "kotlin";
public static final String SCALA_EXTENSION_NAME = "scala";
public static final String TEMPLATE_PROPERTY_QUARKUS_VERSION_NAME = "quarkus.version";
public static final String TEMPLATE_PROPERTY_QUARKUS_VERSION_VALUE = toPropExpr(TEMPLATE_PROPERTY_QUARKUS_VERSION_NAME);
public static final String TEMPLATE_PROPERTY_QUARKUS_PLATFORM_GROUP_ID_NAME = "quarkus.platform.group-id";
public static final String TEMPLATE_PROPERTY_QUARKUS_PLATFORM_GROUP_ID_VALUE = toPropExpr(
TEMPLATE_PROPERTY_QUARKUS_PLATFORM_GROUP_ID_NAME);
public static final String TEMPLATE_PROPERTY_QUARKUS_PLATFORM_ARTIFACT_ID_NAME = "quarkus.platform.artifact-id";
public static final String TEMPLATE_PROPERTY_QUARKUS_PLATFORM_ARTIFACT_ID_VALUE = toPropExpr(
TEMPLATE_PROPERTY_QUARKUS_PLATFORM_ARTIFACT_ID_NAME);
public static final String TEMPLATE_PROPERTY_QUARKUS_PLATFORM_VERSION_NAME = "quarkus.platform.version";
public static final String TEMPLATE_PROPERTY_QUARKUS_PLATFORM_VERSION_VALUE = toPropExpr(
TEMPLATE_PROPERTY_QUARKUS_PLATFORM_VERSION_NAME);
public static final String TEMPLATE_PROPERTY_QUARKUS_PLUGIN_VERSION_NAME = "quarkus-plugin.version";
public static final String TEMPLATE_PROPERTY_QUARKUS_PLUGIN_VERSION_VALUE = toPropExpr(
TEMPLATE_PROPERTY_QUARKUS_PLUGIN_VERSION_NAME);
private static String toPropExpr(String name) {
return "${" + name + "}";
}
private MojoUtils() {
// Avoid direct instantiation
}
public static RepositorySystemSession muteTransferListener(RepositorySystemSession session) {
if (session.getTransferListener() == null) {
return session;
}
final DefaultRepositorySystemSession newSession = new DefaultRepositorySystemSession(session);
newSession.setTransferListener(new QuietMavenTransferListener());
return newSession;
}
/**
* Checks whether the project has the dependency
*
* @param model - the project to check existence of dependency
* @param groupId - the dependency groupId
* @param artifactId - the dependency artifactId
* @return true if the project has the dependency
*/
public static boolean hasDependency(Model model, String groupId, String artifactId) {
return model.getDependencies().stream()
.anyMatch(d -> groupId.equals(d.getGroupId())
&& artifactId.equals(d.getArtifactId()));
}
public static Dependency parse(String dependency) {
Dependency res = new Dependency();
String[] segments = dependency.split(":");
if (segments.length >= 2) {
res.setGroupId(segments[0].toLowerCase());
res.setArtifactId(segments[1].toLowerCase());
if (segments.length >= 3 && !segments[2].isEmpty()) {
res.setVersion(segments[2]);
}
if (segments.length >= 4) {
res.setClassifier(segments[3].toLowerCase());
}
return res;
} else {
throw new IllegalArgumentException("Invalid dependency description '" + dependency + "'");
}
}
/**
* Builds the configuration for the goal using Elements
*
* @param elements A list of elements for the configuration section
* @return The elements transformed into the Maven-native XML format
*/
public static Xpp3Dom configuration(Element... elements) {
Xpp3Dom dom = new Xpp3Dom("configuration");
for (Element e : elements) {
dom.addChild(e.toDom());
}
return dom;
}
/**
* Defines the plugin without its version or extensions.
*
* @param groupId The group id
* @param artifactId The artifact id
* @return The plugin instance
*/
public static Plugin plugin(String groupId, String artifactId) {
return plugin(groupId, artifactId, null);
}
/**
* Defines a plugin without extensions.
*
* @param groupId The group id
* @param artifactId The artifact id
* @param version The plugin version
* @return The plugin instance
*/
public static Plugin plugin(String groupId, String artifactId, String version) {
return plugin(groupId, artifactId, version, Collections.emptyList());
}
/**
* Defines a plugin.
*
* @param groupId The group id
* @param artifactId The artifact id
* @param version The plugin version
* @param dependencies The plugin extensions
* @return The plugin instance
*/
public static Plugin plugin(String groupId, String artifactId, String version, List<Dependency> dependencies) {
Plugin plugin = new Plugin();
plugin.setArtifactId(artifactId);
plugin.setGroupId(groupId);
plugin.setVersion(version);
plugin.setDependencies(dependencies);
return plugin;
}
public static Model readPom(final File pom) throws IOException {
try {
return Maven.readModel(pom.toPath());
} catch (UncheckedIOException e) {
throw e.getCause();
} catch (RuntimeException e) {
throw new IOException("Failed to read model", e.getCause());
}
}
public static Model readPom(final InputStream resourceAsStream) throws IOException {
try (InputStream is = resourceAsStream) {
return Maven.readModel(is);
} catch (UncheckedIOException e) {
throw e.getCause();
} catch (RuntimeException e) {
throw new IOException("Failed to read model", e.getCause());
}
}
public static String[] readGavFromPom(final InputStream resourceAsStream) throws IOException {
Model model = readPom(resourceAsStream);
return new String[] { model.getGroupId(), model.getArtifactId(), model.getVersion() };
}
public static void write(Model model, File outputFile) throws IOException {
try {
Maven.writeModel(model, outputFile.toPath());
} catch (UncheckedIOException e) {
throw e.getCause();
}
}
public static void writeFormatted(Model model, File outputFile) throws IOException {
try {
Maven.writeModel(model, outputFile.toPath(),
XMLFormat.builder().indent(" ").insertLineBreakBetweenMajorSections().build());
} catch (UncheckedIOException e) {
throw e.getCause();
}
}
public static void write(Model model, OutputStream fileOutputStream) throws IOException {
final Properties props = model.getProperties();
// until we can preserve the user ordering, it's better to stick to the alphabetical one
if (!props.isEmpty() && !(props instanceof SortedProperties)) {
final Properties sorted = new SortedProperties();
sorted.putAll(props);
model.setProperties(sorted);
}
try (XmlStreamWriter writer = WriterFactory.newXmlWriter(fileOutputStream)) {
Maven.writeModel(model, writer);
} catch (UncheckedIOException e) {
throw e.getCause();
}
}
public static String credentials(final Dependency d) {
return String.format("%s:%s", d.getGroupId(), d.getArtifactId());
}
/**
* Element wrapper
|
MojoUtils
|
java
|
google__auto
|
value/src/main/java/com/google/auto/value/extension/serializable/serializer/impl/ImmutableListSerializerExtension.java
|
{
"start": 2487,
"end": 4633
}
|
class ____ implements Serializer {
private final Serializer containedTypeSerializer;
private final SerializerFactory factory;
private final ProcessingEnvironment processingEnv;
ImmutableListSerializer(
Serializer containedTypeSerializer,
SerializerFactory factory,
ProcessingEnvironment processingEnv) {
this.containedTypeSerializer = containedTypeSerializer;
this.factory = factory;
this.processingEnv = processingEnv;
}
@Override
public TypeMirror proxyFieldType() {
TypeElement immutableListTypeElement =
processingEnv.getElementUtils().getTypeElement(ImmutableList.class.getCanonicalName());
TypeMirror containedProxyType = containedTypeSerializer.proxyFieldType();
return processingEnv
.getTypeUtils()
.getDeclaredType(immutableListTypeElement, containedProxyType);
}
@Override
public CodeBlock toProxy(CodeBlock expression) {
CodeBlock element = factory.newIdentifier("value");
return CodeBlock.of(
"$L.stream().map($T.wrapper($L -> $L)).collect($T.toImmutableList())",
expression,
FunctionWithExceptions.class,
element,
containedTypeSerializer.toProxy(element),
ImmutableList.class);
}
@Override
public CodeBlock fromProxy(CodeBlock expression) {
CodeBlock element = factory.newIdentifier("value");
return CodeBlock.of(
"$L.stream().map($T.wrapper($L -> $L)).collect($T.toImmutableList())",
expression,
FunctionWithExceptions.class,
element,
containedTypeSerializer.fromProxy(element),
ImmutableList.class);
}
}
private static boolean isImmutableList(TypeMirror type) {
if (type.getKind() != TypeKind.DECLARED) {
return false;
}
return MoreTypes.asTypeElement(type)
.getQualifiedName()
.contentEquals("com.google.common.collect.ImmutableList");
}
private static TypeMirror getContainedType(TypeMirror type) {
return MoreTypes.asDeclared(type).getTypeArguments().get(0);
}
}
|
ImmutableListSerializer
|
java
|
quarkusio__quarkus
|
test-framework/junit5-component/src/main/java/io/quarkus/test/component/QuarkusComponentTestExtension.java
|
{
"start": 39895,
"end": 40107
}
|
class ____ extends ComponentTestContextImpl implements AfterStartContext {
AfterStartContextImpl(Class<?> testClass) {
super(testClass);
}
}
private static
|
AfterStartContextImpl
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MapBindingComponentProcessorTest.java
|
{
"start": 10388,
"end": 10966
}
|
class ____ {",
" @Provides @IntoMap @StringKey(\"Admin\") Handler provideAdminHandler() {",
" return new AdminHandler();",
" }",
"}");
Source mapModuleTwoFile =
CompilerTests.javaSource(
"test.MapModuleTwo",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"import dagger.multibindings.IntoMap;",
"import dagger.multibindings.StringKey;",
"",
"@Module",
"final
|
MapModuleOne
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-tracing-test/src/test/java/org/springframework/boot/micrometer/tracing/test/autoconfigure/AutoConfigureTracingPresentIntegrationTests.java
|
{
"start": 1289,
"end": 1776
}
|
class ____ {
@Test
void customizerDoesNotDisableAvailableMeterRegistriesWhenAnnotationPresent(
@Autowired ApplicationContext applicationContext) {
assertThat(applicationContext.getBean(Tracer.class)).isEqualTo(Tracer.NOOP);
}
@Test
void customizerDoesNotSetExportDisabledPropertyWhenAnnotationPresent(@Autowired Environment environment) {
assertThat(environment.containsProperty("management.tracing.export.enabled")).isFalse();
}
}
|
AutoConfigureTracingPresentIntegrationTests
|
java
|
apache__avro
|
lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
|
{
"start": 1500,
"end": 5907
}
|
class ____ implements Tool {
static final JsonFactory FACTORY = new JsonFactory();
private JsonGenerator generator;
private ColumnValues[] values;
private String[] shortNames;
@Override
public String getName() {
return "trevni_tojson";
}
@Override
public String getShortDescription() {
return "Dumps a Trevni file as JSON.";
}
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
String filename;
boolean pretty = false;
if (args.size() == 2 && "-pretty".equals(args.get(0))) {
pretty = true;
filename = args.get(1);
} else if (args.size() == 1) {
filename = args.get(0);
} else {
err.println("Usage: [-pretty] input");
return 1;
}
toJson(TrevniUtil.input(filename), out, pretty);
return 0;
}
/** Read a Trevni file and print each row as a JSON object. */
public void toJson(Input input, PrintStream out, boolean pretty) throws IOException {
this.generator = FACTORY.createGenerator(out, JsonEncoding.UTF8);
if (pretty) {
generator.useDefaultPrettyPrinter();
} else { // ensure newline separation
MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
pp.setRootValueSeparator(System.getProperty("line.separator"));
generator.setPrettyPrinter(pp);
}
ColumnFileReader reader = new ColumnFileReader(input);
int columnCount = (int) reader.getColumnCount();
this.values = new ColumnValues[columnCount];
this.shortNames = new String[columnCount];
for (int i = 0; i < columnCount; i++) {
values[i] = reader.getValues(i);
shortNames[i] = shortName(reader.getColumnMetaData(i));
}
List<ColumnMetaData> roots = reader.getRoots();
for (long row = 0; row < reader.getRowCount(); row++) {
for (ColumnValues v : values)
v.startRow();
generator.writeStartObject();
for (ColumnMetaData root : roots)
valueToJson(root);
generator.writeEndObject();
}
generator.flush();
out.println();
reader.close();
}
private void valueToJson(ColumnMetaData column) throws IOException {
generator.writeFieldName(shortNames[column.getNumber()]);
ColumnValues in = values[column.getNumber()];
if (!column.isArray()) {
primitiveToJson(column, in.nextValue());
} else {
generator.writeStartArray();
int length = in.nextLength();
for (int i = 0; i < length; i++) {
Object value = in.nextValue();
List<ColumnMetaData> children = column.getChildren();
if (children.size() == 0) {
primitiveToJson(column, value);
} else {
generator.writeStartObject();
if (value != null) {
generator.writeFieldName("value$");
primitiveToJson(column, value);
}
for (ColumnMetaData child : children)
valueToJson(child);
generator.writeEndObject();
}
}
generator.writeEndArray();
}
}
private void primitiveToJson(ColumnMetaData column, Object value) throws IOException {
switch (column.getType()) {
case NULL:
generator.writeNull();
break;
case BOOLEAN:
generator.writeBoolean((Boolean) value);
break;
case INT:
generator.writeNumber((Integer) value);
break;
case LONG:
generator.writeNumber((Long) value);
break;
case FIXED32:
generator.writeNumber((Integer) value);
break;
case FIXED64:
generator.writeNumber((Long) value);
break;
case FLOAT:
generator.writeNumber((Float) value);
break;
case DOUBLE:
generator.writeNumber((Double) value);
break;
case STRING:
generator.writeString((String) value);
break;
case BYTES:
generator.writeBinary((byte[]) value);
break;
default:
throw new RuntimeException("Unknown value type: " + column.getType());
}
}
// trim off portion of name shared with parent
private String shortName(ColumnMetaData column) {
String name = column.getName();
ColumnMetaData parent = column.getParent();
if (parent != null && name.startsWith(parent.getName()))
name = name.substring(parent.getName().length());
if (!Character.isLetterOrDigit(name.charAt(0)))
name = name.substring(1);
return name;
}
}
|
TrevniToJsonTool
|
java
|
apache__camel
|
components/camel-jcache/src/test/java/org/apache/camel/component/jcache/policy/JCachePolicyProcessorTest.java
|
{
"start": 1486,
"end": 14351
}
|
class ____ extends JCachePolicyTestBase {
private static final Logger LOG = LoggerFactory.getLogger(JCachePolicyProcessorTest.class);
//Basic test to verify value gets cached and route is not executed for the second time
@Test
public void testValueGetsCached() {
final String key = randomString();
MockEndpoint mock = getMockEndpoint("mock:value");
Cache cache = lookupCache("simple");
//Send first, key is not in cache
Object responseBody = this.template().requestBody("direct:cached-simple", key);
//We got back the value, mock was called once, value got cached.
assertEquals(generateValue(key), cache.get(key));
assertEquals(generateValue(key), responseBody);
assertEquals(1, mock.getExchanges().size());
//Send again, key is already in cache
responseBody = this.template().requestBody("direct:cached-simple", key);
//We got back the stored value, but the mock was not called again
assertEquals(generateValue(key), cache.get(key));
assertEquals(generateValue(key), responseBody);
assertEquals(1, mock.getExchanges().size());
}
//Verify policy applies only on the section of the route wrapped
@Test
public void testPartial() {
final String key = randomString();
MockEndpoint mock = getMockEndpoint("mock:value");
MockEndpoint mockUnwrapped = getMockEndpoint("mock:unwrapped");
//Send first, key is not in cache
Object responseBody = this.template().requestBody("direct:cached-partial", key);
//We got back the value, mock was called once, value got cached.
Cache cache = lookupCache("simple");
assertEquals(generateValue(key), cache.get(key));
assertEquals(generateValue(key), responseBody);
assertEquals(1, mock.getExchanges().size());
assertEquals(1, mockUnwrapped.getExchanges().size());
//Send again, key is already in cache
responseBody = this.template().requestBody("direct:cached-partial", key);
//We got back the stored value, the mock was not called again, but the unwrapped mock was
assertEquals(generateValue(key), cache.get(key));
assertEquals(generateValue(key), responseBody);
assertEquals(1, mock.getExchanges().size());
assertEquals(2, mockUnwrapped.getExchanges().size());
}
//Cache is closed
@Test
public void testClosedCache() {
final String key = randomString();
MockEndpoint mock = getMockEndpoint("mock:value");
//Send first, key is not in cache
Object responseBody = this.template().requestBody("direct:cached-closed", key);
//We got back the value, mock was called once
assertEquals(generateValue(key), responseBody);
assertEquals(1, mock.getExchanges().size());
//Send again, cache is closed
responseBody = this.template().requestBody("direct:cached-closed", key);
//We got back the stored value, mock was called again
assertEquals(generateValue(key), responseBody);
assertEquals(2, mock.getExchanges().size());
}
//Key is already stored
@Test
public void testValueWasCached() {
final String key = randomString();
final String value = "test";
MockEndpoint mock = getMockEndpoint("mock:value");
//Prestore value in cache
Cache cache = lookupCache("simple");
cache.put(key, value);
//Send first, key is already in cache
Object responseBody = this.template().requestBody("direct:cached-simple", key);
//We got back the value, mock was not called, cache was not modified
assertEquals(value, cache.get(key));
assertEquals(value, responseBody);
assertEquals(0, mock.getExchanges().size());
}
//Null final body
@Test
public void testNullResult() {
final String key = randomString();
MockEndpoint mock = getMockEndpoint("mock:value");
mock.whenAnyExchangeReceived(e -> e.getMessage().setBody(null));
//Send first
this.template().requestBody("direct:cached-simple", key);
assertEquals(1, mock.getExchanges().size());
//Send again, nothing was cached
this.template().requestBody("direct:cached-simple", key);
assertEquals(2, mock.getExchanges().size());
}
    //Use a key expression ${header.mykey}
    /**
     * Verifies a custom key expression: the cache key is taken from the
     * "mykey" header rather than the body, so a second send with a different
     * body but the same header value is served from the cache without
     * invoking the route again.
     */
    @Test
    public void testKeyExpression() {
        final String key = randomString();
        final String body = randomString();
        MockEndpoint mock = getMockEndpoint("mock:value");
        Cache cache = lookupCache("simple");
        //Send first, key is not in cache
        Object responseBody = this.template().requestBodyAndHeader("direct:cached-byheader", body, "mykey", key);
        //We got back the value, mock was called once, value got cached.
        assertEquals(generateValue(body), cache.get(key));
        assertEquals(generateValue(body), responseBody);
        assertEquals(1, mock.getExchanges().size());
        //Send again, use another body, but the same key
        responseBody = this.template().requestBodyAndHeader("direct:cached-byheader", randomString(), "mykey", key);
        //We got back the stored value, and the mock was not called again
        assertEquals(generateValue(body), cache.get(key));
        assertEquals(generateValue(body), responseBody);
        assertEquals(1, mock.getExchanges().size());
    }
    //Key is null, ${header.mykey} is not set
    /**
     * Verifies behavior when the key expression evaluates to null (the
     * "mykey" header is never set): caching is skipped entirely — nothing is
     * stored under "null", "" or the random key — and the route runs for
     * every request.
     */
    @Test
    public void testKeyNull() {
        final String key = randomString();
        String body = randomString();
        MockEndpoint mock = getMockEndpoint("mock:value");
        Cache cache = lookupCache("simple");
        //Send first, expected header is not set
        Object responseBody = this.template().requestBody("direct:cached-byheader", body);
        //We got back the value, mock was called once, nothing is cached.
        assertFalse(cache.containsKey("null"));
        assertFalse(cache.containsKey(""));
        assertFalse(cache.containsKey(key));
        assertEquals(generateValue(body), responseBody);
        assertEquals(1, mock.getExchanges().size());
        //Send again, use another body, but the same key
        body = randomString();
        responseBody = this.template().requestBody("direct:cached-byheader", body);
        //We got back the value, mock was called again, nothing is cached
        assertFalse(cache.containsKey("null"));
        assertFalse(cache.containsKey(""));
        assertFalse(cache.containsKey(key));
        assertEquals(generateValue(body), responseBody);
        assertEquals(2, mock.getExchanges().size());
    }
    //Value is cached after handled exception
    /**
     * Verifies that a value produced by an onException handler
     * (handled=true, body rewritten to "handled-...") is treated as the
     * route result and is therefore cached.
     */
    @Test
    public void testHandledException() {
        final String key = randomString();
        MockEndpoint mock = getMockEndpoint("mock:value");
        Cache cache = lookupCache("simple");
        //Send first, key is not in cache
        Object responseBody = this.template().requestBody("direct:cached-exception", key);
        //We got back the value after exception handler, mock was called once, value got cached.
        assertEquals("handled-" + generateValue(key), cache.get(key));
        assertEquals("handled-" + generateValue(key), responseBody);
        assertEquals(1, mock.getExchanges().size());
    }
    //Nothing is cached after an unhandled exception
    /**
     * Verifies that an unhandled exception prevents caching: the exception
     * stays on the exchange and the cache remains empty afterwards.
     */
    @Test
    public void testException() {
        final String key = randomString();
        MockEndpoint mock = getMockEndpoint("mock:value");
        mock.whenAnyExchangeReceived(e -> {
            throw new RuntimeCamelException("unexpected");
        });
        Cache cache = lookupCache("simple");
        //Send
        Exchange response = this.template().request("direct:cached-exception",
                e -> e.getMessage().setBody(key));
        //Exception is on the exchange, cache is empty
        assertEquals("unexpected", response.getException().getMessage());
        assertEquals(1, mock.getExchanges().size());
        assertFalse(cache.iterator().hasNext());
    }
    //Use a bypass expression ${header.mybypass}
    /**
     * Verifies the bypass expression: while the "mybypass" header is true the
     * cached value is ignored on lookup (the route runs and the cache is
     * refreshed); once the header is false, the previously stored value is
     * served without calling the route again.
     */
    @Test
    public void testBypassExpression() throws Exception {
        final String key = randomString();
        final String body = randomString();
        MockEndpoint mock = getMockEndpoint("mock:value");
        Cache cache = lookupCache("simple");
        Map<String, Object> headers = new HashMap<>();
        headers.put("mykey", key);
        headers.put("mybypass", Boolean.TRUE);
        //Send first, key is not in cache
        Object responseBody = this.template().requestBodyAndHeaders("direct:cached-bypass", body, headers);
        //We got back the value, mock was called once, value got cached.
        assertEquals(generateValue(body), cache.get(key));
        assertEquals(generateValue(body), responseBody);
        assertEquals(1, mock.getExchanges().size());
        //Send again, use another body, but the same key
        final String body2 = randomString();
        responseBody = this.template().requestBodyAndHeaders("direct:cached-bypass", body2, headers);
        //We got back the value, the mock was called again, value got cached
        assertEquals(generateValue(body2), cache.get(key));
        assertEquals(generateValue(body2), responseBody);
        assertEquals(2, mock.getExchanges().size());
        //Send again, use another body, but the same key; disable bypass
        headers.put("mybypass", Boolean.FALSE);
        responseBody = this.template().requestBodyAndHeaders("direct:cached-bypass", body, headers);
        //We got back the cached value, the mock was not called again
        assertEquals(generateValue(body2), cache.get(key));
        assertEquals(generateValue(body2), responseBody);
        assertEquals(2, mock.getExchanges().size());
    }
    /**
     * Defines one route per test scenario, each wrapped with a differently
     * configured JCachePolicy: default cache, a partially wrapped route,
     * caching after exception handling, an intentionally closed cache, a
     * header-based key expression, and a key expression combined with a
     * bypass expression.
     */
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                CacheManager cacheManager = Caching.getCachingProvider().getCacheManager();
                //Simple cache - with default config
                Cache cache = cacheManager.createCache("simple", new MutableConfiguration<>());
                JCachePolicy jcachePolicy = new JCachePolicy();
                jcachePolicy.setCache(cache);
                from("direct:cached-simple")
                        .policy(jcachePolicy)
                        .to("mock:value");
                //Example to wrap only part of the route
                from("direct:cached-partial")
                        .policy(jcachePolicy)
                        .log(LoggingLevel.DEBUG, LOG, "Executing route, not found in cache. body:${body}")
                        .to("mock:value")
                        .end()
                        .log(LoggingLevel.DEBUG, LOG, "This is always called. body:${body}")
                        .to("mock:unwrapped");
                //Cache after exception handling
                from("direct:cached-exception")
                        .onException(Exception.class)
                        .onWhen(exceptionMessage().isEqualTo("test"))
                        .handled(true)
                        .setBody(simple("handled-${body}"))
                        .end()
                        .policy(jcachePolicy)
                        .to("mock:value")
                        .throwException(new Exception("test"));
                //Closed cache - lookups fail, so the route always runs (see testValueGetsCachedClosed)
                cache = cacheManager.createCache("closed", new MutableConfiguration<>());
                cache.close();
                jcachePolicy = new JCachePolicy();
                jcachePolicy.setCache(cache);
                from("direct:cached-closed")
                        .policy(jcachePolicy)
                        .to("mock:value");
                //Use ${header.mykey} as the key
                jcachePolicy = new JCachePolicy();
                jcachePolicy.setCache(cacheManager.getCache("simple"));
                jcachePolicy.setKeyExpression(simple("${header.mykey}"));
                from("direct:cached-byheader")
                        .policy(jcachePolicy)
                        .to("mock:value");
                //Use ${header.mykey} as the key, ${header.mybypass} as bypass
                jcachePolicy = new JCachePolicy();
                jcachePolicy.setCache(cacheManager.getCache("simple"));
                jcachePolicy.setKeyExpression(simple("${header.mykey}"));
                jcachePolicy.setBypassExpression(simple("${header.mybypass}"));
                from("direct:cached-bypass")
                        .policy(jcachePolicy)
                        .to("mock:value");
            }
        };
    }
}
|
JCachePolicyProcessorTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/MissingDefaultTest.java
|
{
"start": 1370,
"end": 1727
}
|
class ____ {
boolean f(int i) {
// BUG: Diagnostic contains:
switch (i) {
case 42:
return true;
}
return false;
}
}
""")
.addOutputLines(
"out/Test.java",
"""
|
Test
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/basicType/DoubleTest3_random.java
|
{
"start": 300,
"end": 1956
}
|
class ____ extends TestCase {
    /**
     * Round-trips 1,000,000 random doubles through POJO JSON
     * serialization/deserialization, asserting no precision is lost.
     */
    public void test_ran() throws Exception {
        Random rand = new Random();
        for (int i = 0; i < 1000 * 1000 * 1; ++i) {
            double val = rand.nextDouble();
            String str = JSON.toJSONString(new Model(val));
            Model m = JSON.parseObject(str, Model.class);
            assertEquals(val, m.value);
        }
    }
    /**
     * Same round-trip as test_ran but using the BeanToArray serializer
     * feature paired with SupportArrayToBean on the read side.
     */
    public void test_ran_2() throws Exception {
        Random rand = new Random();
        for (int i = 0; i < 1000 * 1000 * 1; ++i) {
            double val = rand.nextDouble();
            String str = JSON.toJSONString(new Model(val), SerializerFeature.BeanToArray);
            Model m = JSON.parseObject(str, Model.class, Feature.SupportArrayToBean);
            assertEquals(val, m.value);
        }
    }
    /**
     * Round-trips random doubles stored as a JSON object field
     * (via a singleton map) and read back with getDoubleValue,
     * asserting no precision loss.
     */
    public void test_ran_3() throws Exception {
        Random rand = new Random();
        for (int i = 0; i < 1000 * 1000 * 1; ++i) {
            double val = rand.nextDouble();
            String str = JSON.toJSONString(Collections.singletonMap("val", val));
            double val2 = JSON.parseObject(str).getDoubleValue("val");
            assertEquals(val, val2);
        }
    }
public void test_ran_4() throws Exception {
Random rand = new Random();
for (int i = 0; i < 1000 * 1000 * 1; ++i) {
double val = rand.nextDouble();
HashMap map = new HashMap();
map.put("val", val);
String str = JSON.toJSONString(map);
double val2 = JSON.parseObject(str).getDoubleValue("val");
assertEquals(val, val2);
}
}
public static
|
DoubleTest3_random
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/JavaInstantGetSecondsGetNanoTest.java
|
{
"start": 2971,
"end": 3592
}
|
class ____ {
public static void foo(Instant instant) {
long seconds = instant.getEpochSecond();
if (true) {
// BUG: Diagnostic contains: JavaInstantGetSecondsGetNano
int nanos = instant.getNano();
}
}
}
""")
.doTest();
}
@Test
public void getSecondsWithGetNanosInDifferentMethods() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import java.time.Instant;
public
|
TestCase
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/support/jndi/JndiCamelSingletonInitialContextFactoryTest.java
|
{
"start": 1332,
"end": 2993
}
|
class ____ extends ContextTestSupport {
    private static final String FAKE = "!!! Get DataSource fake !!!";
    private final Hashtable<String, String> env = new Hashtable<>();
    /**
     * Points JNDI at Camel's singleton initial-context factory before the
     * Camel context starts, so every InitialContext built with {@link #env}
     * resolves to the same underlying context.
     */
    @Override
    @BeforeEach
    public void setUp() throws Exception {
        // use the singleton context factory
        env.put(Context.INITIAL_CONTEXT_FACTORY, CamelSingletonInitialContextFactory.class.getName());
        super.setUp();
    }
    /** Builds a registry backed by a JNDI context that pre-binds the fake DataSource. */
    @Override
    protected Registry createCamelRegistry() throws Exception {
        Context jndiContext = new InitialContext(env);
        jndiContext.bind("jdbc/myDataSource", FAKE);
        return new DefaultRegistry(new JndiBeanRepository(jndiContext));
    }
    /** A lookup from inside the route must observe the binding made during registry creation. */
    @Test
    public void testSingletonJndiContext() throws Exception {
        MockEndpoint result = getMockEndpoint("mock:result");
        result.expectedBodiesReceived(FAKE);
        template.sendBody("direct:simple", "Dummy");
        result.assertIsSatisfied();
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:simple").process(exchange -> {
                    // constructing a fresh InitialContext with the same environment
                    // should hand back the existing singleton context
                    Context jndiContext = new InitialContext(env);
                    exchange.getIn().setBody(jndiContext.lookup("jdbc/myDataSource").toString());
                }).to("mock:result");
            }
        };
    }
}
|
JndiCamelSingletonInitialContextFactoryTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContains_Test.java
|
{
"start": 1297,
"end": 1775
}
|
class ____ extends BooleanArraysBaseTest {
    // mock injected into the object under test so delegation can be verified
    private Arrays internalArrays;
    /**
     * Replaces the real internal {@code Arrays} helper with a mock after the
     * base-class setup, so the test below can verify delegation.
     */
    @BeforeEach
    @Override
    public void setUp() {
        super.setUp();
        internalArrays = mock(Arrays.class);
        setArrays(internalArrays);
    }
    /** assertContains must forward to the internal Arrays with the same arguments plus failures. */
    @Test
    void should_delegate_to_internal_Arrays() {
        arrays.assertContains(someInfo(), actual, arrayOf(true, false));
        verify(internalArrays).assertContains(someInfo(), failures, actual, arrayOf(true, false));
    }
}
|
BooleanArrays_assertContains_Test
|
java
|
quarkusio__quarkus
|
extensions/spring-data-jpa/deployment/src/test/java/io/quarkus/spring/data/deployment/BookRepositoryBadAlias.java
|
{
"start": 478,
"end": 574
}
|
interface ____ {
int getPublicationYear();
Long getCount();
}
}
|
BookCountByYear
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/io/stream/ByteBufferStreamInput.java
|
{
"start": 753,
"end": 8734
}
|
/**
 * A {@link StreamInput} implementation backed by a {@link ByteBuffer}.
 * Every read advances the buffer's position; running past the limit is
 * surfaced as {@link EOFException}. mark/reset delegate to the buffer's own
 * mark, which the constructor places at the initial position.
 */
class ____ extends StreamInput {
    private final ByteBuffer buffer;
    public ByteBufferStreamInput(ByteBuffer buffer) {
        // mark() pins the current position so reset() rewinds here unless mark(int) is called later
        this.buffer = buffer.mark();
    }
    /**
     * Read a vInt encoded in the format written by {@link StreamOutput#writeVInt} from a {@link ByteBuffer}.
     * The buffer is assumed to contain enough bytes to fully read the value and its position is moved by this method.
     * @param buffer buffer to read from
     * @return value read from the buffer
     * @throws IOException if buffer does not contain a valid vInt starting from the current position
     */
    public static int readVInt(ByteBuffer buffer) throws IOException {
        // 1-5 bytes, 7 payload bits per byte, least-significant group first;
        // a set sign bit on a byte marks a continuation
        byte b = buffer.get();
        if (b >= 0) {
            return b;
        }
        int i = b & 0x7F;
        b = buffer.get();
        i |= (b & 0x7F) << 7;
        if (b >= 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7F) << 14;
        if (b >= 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7F) << 21;
        if (b >= 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x0F) << 28;
        // the fifth byte can carry at most the 4 remaining bits of a 32-bit value
        if ((b & 0xF0) != 0) {
            throwOnBrokenVInt(b, i);
        }
        return i;
    }
    /**
     * Read a vLong encoded in the format written by {@link StreamOutput#writeVLong(long)} from a {@link ByteBuffer}.
     * The buffer is assumed to contain enough bytes to fully read the value and its position is moved by this method.
     * @param buffer buffer to read from
     * @return value read from the buffer
     * @throws IOException if buffer does not contain a valid vLong starting from the current position
     */
    public static long readVLong(ByteBuffer buffer) throws IOException {
        // 1-10 bytes; same 7-bits-per-byte continuation scheme as readVInt
        byte b = buffer.get();
        long i = b & 0x7FL;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 7;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 14;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 21;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 28;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 35;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 42;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= (b & 0x7FL) << 49;
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        i |= ((b & 0x7FL) << 56);
        if ((b & 0x80) == 0) {
            return i;
        }
        b = buffer.get();
        // the tenth byte may only hold the final (63rd) bit, so 0 and 1 are the only legal values
        if (b != 0 && b != 1) {
            throwOnBrokenVLong(b, i);
        }
        i |= ((long) b) << 63;
        return i;
    }
    @Override
    public String readString() throws IOException {
        final int chars = readArraySize();
        // cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
        final ByteBuffer buffer = this.buffer;
        if (buffer.hasArray()) {
            // attempt reading bytes directly into a string to minimize copying
            final String string = tryReadStringFromBytes(
                buffer.array(),
                buffer.position() + buffer.arrayOffset(),
                buffer.limit() + buffer.arrayOffset(),
                chars
            );
            if (string != null) {
                return string;
            }
        }
        // fall back to the generic decoding path (e.g. for direct buffers)
        return doReadString(chars);
    }
    @Override
    public int read() throws IOException {
        // cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
        final ByteBuffer buffer = this.buffer;
        if (buffer.hasRemaining() == false) {
            // InputStream contract: -1 signals end of stream
            return -1;
        }
        return buffer.get() & 0xFF;
    }
    @Override
    public byte readByte() throws IOException {
        try {
            return buffer.get();
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        // cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
        final ByteBuffer buffer = this.buffer;
        if (buffer.hasRemaining() == false) {
            return -1;
        }
        // short read is allowed by the InputStream contract
        len = Math.min(len, buffer.remaining());
        buffer.get(b, off, len);
        return len;
    }
    @Override
    public long skip(long n) throws IOException {
        // cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
        final ByteBuffer buffer = this.buffer;
        int remaining = buffer.remaining();
        if (n > remaining) {
            // skip at most what is left in the buffer
            buffer.position(buffer.limit());
            return remaining;
        }
        buffer.position((int) (buffer.position() + n));
        return n;
    }
    @Override
    public void readBytes(byte[] b, int offset, int len) throws IOException {
        try {
            buffer.get(b, offset, len);
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    @Override
    public short readShort() throws IOException {
        try {
            return buffer.getShort();
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    @Override
    public int readInt() throws IOException {
        try {
            return buffer.getInt();
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    @Override
    public int readVInt() throws IOException {
        try {
            return readVInt(buffer);
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    @Override
    public long readLong() throws IOException {
        try {
            return buffer.getLong();
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    @Override
    public long readVLong() throws IOException {
        try {
            return readVLong(buffer);
        } catch (BufferUnderflowException ex) {
            throw newEOFException(ex);
        }
    }
    // translate the buffer's unchecked underflow into the stream-style EOFException
    private static EOFException newEOFException(RuntimeException ex) {
        EOFException eofException = new EOFException();
        eofException.initCause(ex);
        return eofException;
    }
    @Override
    public void reset() throws IOException {
        buffer.reset();
    }
    @Override
    public int available() throws IOException {
        return buffer.remaining();
    }
    @Override
    protected void ensureCanReadBytes(int length) throws EOFException {
        final int available = buffer.remaining();
        if (length > available) {
            throwEOF(length, available);
        }
    }
    @Override
    public BytesReference readSlicedBytesReference() throws IOException {
        // cache object fields (even when final this is a valid optimization, see https://openjdk.org/jeps/8132243)
        final ByteBuffer buffer = this.buffer;
        if (buffer.hasArray()) {
            // zero-copy slice over the backing array; advance past it via skip
            int len = readVInt();
            var res = new BytesArray(buffer.array(), buffer.arrayOffset() + buffer.position(), len);
            skip(len);
            return res;
        }
        return super.readSlicedBytesReference();
    }
    @Override
    public void mark(int readlimit) {
        // read limit is irrelevant for an in-memory buffer
        buffer.mark();
    }
    @Override
    public boolean markSupported() {
        return true;
    }
    @Override
    public void close() throws IOException {}
}
|
ByteBufferStreamInput
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeEqual_create_Test.java
|
{
"start": 1279,
"end": 2283
}
|
class ____ {
    /** Default message: actual and expected rendered on their own lines. */
    @Test
    void should_create_error_message() {
        // GIVEN
        ErrorMessageFactory factory = shouldNotBeEqual("Yoda", "Luke");
        // WHEN
        String message = factory.create(new TestDescription("Jedi"), new StandardRepresentation());
        // THEN
        then(message).isEqualTo("[Jedi] %nExpecting actual:%n \"Yoda\"%nnot to be equal to:%n \"Luke\"%n".formatted());
    }
    /** With a custom comparison strategy, the message must name the comparator used. */
    @Test
    void should_create_error_message_with_custom_comparison_strategy() {
        // GIVEN
        ErrorMessageFactory factory = shouldNotBeEqual("Yoda", "Luke",
                                                       new ComparatorBasedComparisonStrategy(CaseInsensitiveStringComparator.INSTANCE));
        // WHEN
        String message = factory.create(new TestDescription("Jedi"), new StandardRepresentation());
        // THEN
        then(message).isEqualTo("[Jedi] %nExpecting actual:%n \"Yoda\"%nnot to be equal to:%n \"Luke\"%nwhen comparing values using CaseInsensitiveStringComparator".formatted());
    }
}
|
ShouldNotBeEqual_create_Test
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
|
{
"start": 1484,
"end": 7063
}
|
/**
 * JMX management interface exposing FSNamesystem state and metrics:
 * capacity, block counts, datanode states, snapshot statistics, lock queue
 * length and edit-log sync figures. All operations are read-only getters.
 */
interface ____ {
  /**
   * The state of the file system: Safemode or Operational
   * @return the state
   */
  public String getFSState();
  /**
   * Number of allocated blocks in the system
   * @return - number of allocated blocks
   */
  public long getBlocksTotal();
  /**
   * Total storage capacity
   * @return - total capacity in bytes
   */
  public long getCapacityTotal();
  /**
   * Free (unused) storage capacity
   * @return - free capacity in bytes
   */
  public long getCapacityRemaining();
  /**
   * Used storage capacity
   * @return - used capacity in bytes
   */
  public long getCapacityUsed();
  /**
   * Total PROVIDED storage capacity.
   * @return - total PROVIDED storage capacity in bytes
   */
  public long getProvidedCapacityTotal();
  /**
   * Total number of files and directories
   * @return - num of files and directories
   */
  public long getFilesTotal();
  /**
   * Get aggregated count of all blocks pending to be reconstructed.
   * @return number of blocks pending replication
   * @deprecated Use {@link #getPendingReconstructionBlocks()} instead.
   */
  @Deprecated
  public long getPendingReplicationBlocks();
  /**
   * Get aggregated count of all blocks pending to be reconstructed.
   * @return Number of blocks to be replicated.
   */
  public long getPendingReconstructionBlocks();
  /**
   * Get aggregated count of all blocks with low redundancy.
   * @return number of blocks with low redundancy
   * @deprecated Use {@link #getLowRedundancyBlocks()} instead.
   */
  @Deprecated
  public long getUnderReplicatedBlocks();
  /**
   * Get aggregated count of all blocks with low redundancy.
   * @return Number of blocks with low redundancy.
   */
  public long getLowRedundancyBlocks();
  /**
   * Blocks scheduled for replication
   * @return - num of blocks scheduled for replication
   */
  public long getScheduledReplicationBlocks();
  /**
   * Total Load on the FSNamesystem
   * @return - total load of FSNamesystem
   */
  public int getTotalLoad();
  /**
   * Number of Live data nodes
   * @return number of live data nodes
   */
  public int getNumLiveDataNodes();
  /**
   * Number of dead data nodes
   * @return number of dead data nodes
   */
  public int getNumDeadDataNodes();
  /**
   * Number of stale data nodes
   * @return number of stale data nodes
   */
  public int getNumStaleDataNodes();
  /**
   * Number of decommissioned Live data nodes
   * @return number of decommissioned live data nodes
   */
  public int getNumDecomLiveDataNodes();
  /**
   * Number of decommissioned dead data nodes
   * @return number of decommissioned dead data nodes
   */
  public int getNumDecomDeadDataNodes();
  /**
   * @return Number of in-service data nodes, where NumInServiceDataNodes =
   * NumLiveDataNodes - NumDecomLiveDataNodes - NumInMaintenanceLiveDataNodes
   */
  int getNumInServiceLiveDataNodes();
  /**
   * Number of failed data volumes across all live data nodes.
   * @return number of failed data volumes across all live data nodes
   */
  int getVolumeFailuresTotal();
  /**
   * Returns an estimate of total capacity lost due to volume failures in bytes
   * across all live data nodes.
   * @return estimate of total capacity lost in bytes
   */
  long getEstimatedCapacityLostTotal();
  /**
   * Number of data nodes that are in the decommissioning state
   * @return number of decommissioning data nodes
   */
  public int getNumDecommissioningDataNodes();
  /**
   * The statistics of snapshots
   * @return snapshot statistics
   */
  public String getSnapshotStats();
  /**
   * Return the maximum number of inodes in the file system
   * @return maximum number of inodes
   */
  public long getMaxObjects();
  /**
   * Number of blocks pending deletion
   * @return number of blocks pending deletion
   */
  long getPendingDeletionBlocks();
  /**
   * Time when block deletions will begin
   * @return time when block deletions will begin
   */
  long getBlockDeletionStartTime();
  /**
   * Number of content stale storages.
   * @return number of content stale storages
   */
  public int getNumStaleStorages();
  /**
   * Returns a nested JSON object listing the top users for different RPC
   * operations over tracked time windows.
   *
   * @return JSON string
   */
  public String getTopUserOpCounts();
  /**
   * Return the number of encryption zones in the system.
   * @return number of encryption zones
   */
  int getNumEncryptionZones();
  /**
   * Returns the length of the wait Queue for the FSNameSystemLock.
   *
   * A larger number here indicates lots of threads are waiting for
   * FSNameSystemLock.
   * @return int - Number of Threads waiting to acquire FSNameSystemLock
   */
  int getFsLockQueueLength();
  /**
   * Return total number of Sync Operations on FSEditLog.
   * @return total number of sync operations
   */
  long getTotalSyncCount();
  /**
   * Return total time spent doing sync operations on FSEditLog.
   * @return total sync times, formatted as a string
   */
  String getTotalSyncTimes();
  /**
   * @return Number of IN_MAINTENANCE live data nodes
   */
  int getNumInMaintenanceLiveDataNodes();
  /**
   * @return Number of IN_MAINTENANCE dead data nodes
   */
  int getNumInMaintenanceDeadDataNodes();
  /**
   * @return Number of ENTERING_MAINTENANCE data nodes
   */
  int getNumEnteringMaintenanceDataNodes();
  /**
   * Get the current number of delegation tokens in memory.
   * @return number of DTs
   */
  long getCurrentTokensCount();
  /**
   * Returns the number of paths to be processed by storage policy satisfier.
   *
   * @return The number of paths to be processed by sps.
   */
  int getPendingSPSPaths();
  /**
   * Get the progress of the reconstruction queues initialisation.
   *
   * @return Returns values between 0 and 1 for the progress.
   */
  float getReconstructionQueuesInitProgress();
}
|
FSNamesystemMBean
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 471461,
"end": 472673
}
|
enum ____ com.fasterxml.jackson.databind.SerializationFeature, com.fasterxml.jackson.databind.DeserializationFeature, or com.fasterxml.jackson.databind.MapperFeature Multiple features can be separated by comma", displayName = "Enable Features"),
@YamlProperty(name = "enableJaxbAnnotationModule", type = "boolean", defaultValue = "false", description = "Whether to enable the JAXB annotations module when using jackson. When enabled then JAXB annotations can be used by Jackson.", displayName = "Enable Jaxb Annotation Module"),
@YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id"),
@YamlProperty(name = "include", type = "string", description = "If you want to marshal a pojo to JSON, and the pojo has some fields with null values. And you want to skip these null values, you can set this option to NON_NULL", displayName = "Include"),
@YamlProperty(name = "jsonView", type = "string", description = "When marshalling a POJO to JSON you might want to exclude certain fields from the JSON output. With Jackson you can use JSON views to accomplish this. This option is to refer to the
|
from
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java
|
{
"start": 9272,
"end": 11685
}
|
/**
 * Test coordinator context that reranks documents by sending their feature
 * data to a mocked action ({@code TEST_RERANKING_ACTION_TYPE}) and turning
 * the response into per-document scores.
 */
class ____ extends RankFeaturePhaseRankCoordinatorContext {
    private final String inferenceId;
    private final String inferenceText;
    private final Client client;
    TestRerankingRankFeaturePhaseRankCoordinatorContext(
        int size,
        int from,
        int windowSize,
        Client client,
        String inferenceId,
        String inferenceText,
        float minScore
    ) {
        super(size, from, windowSize, false);
        this.client = client;
        this.inferenceId = inferenceId;
        this.inferenceText = inferenceText;
        // NOTE(review): minScore is accepted but never stored or used — confirm intentional
    }
    // builds the request carrying the documents' feature strings
    protected TestRerankingActionRequest generateRequest(List<String> docFeatures) {
        return new TestRerankingActionRequest(docFeatures);
    }
    // the action the request is executed against; overridable for test variants
    protected ActionType<TestRerankingActionResponse> actionType() {
        return TEST_RERANKING_ACTION_TYPE;
    }
    // unboxes the response's List<Float> into a primitive score array
    protected float[] extractScoresFromResponse(TestRerankingActionResponse response) {
        float[] scores = new float[response.scores.size()];
        for (int i = 0; i < response.scores.size(); i++) {
            scores[i] = response.scores.get(i);
        }
        return scores;
    }
    /**
     * Executes the reranking action for the given feature docs and delivers
     * one score per doc to {@code scoreListener}. The request ref is released
     * in the finally block once execution has been handed off.
     */
    protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener<float[]> scoreListener) {
        // Wrap the provided rankListener to an ActionListener that would handle the response from the inference service
        // and then pass the results
        final ActionListener<TestRerankingActionResponse> actionListener = scoreListener.delegateFailureAndWrap((l, r) -> {
            float[] scores = extractScoresFromResponse(r);
            assert scores.length == featureDocs.length;
            l.onResponse(scores);
        });
        List<String> featureData = Arrays.stream(featureDocs).map(x -> x.featureData).flatMap(List::stream).toList();
        TestRerankingActionRequest request = generateRequest(featureData);
        try {
            ActionType<TestRerankingActionResponse> action = actionType();
            client.execute(action, request, actionListener);
        } finally {
            // guard against a generateRequest override returning null
            if (request != null) {
                request.decRef();
            }
        }
    }
}
public static
|
TestRerankingRankFeaturePhaseRankCoordinatorContext
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/manager/SupportRequestManagerFragment.java
|
{
"start": 1243,
"end": 1435
}
|
class ____
* method for details.
*/
@Deprecated
@NonNull
public RequestManagerTreeNode getRequestManagerTreeNode() {
return new EmptyRequestManagerTreeNode();
}
}
|
deprecation
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/custom/declaredtype/User.java
|
{
"start": 1296,
"end": 1451
}
|
interface ____
return emailAddresses;
}
public void setEmailAddresses(IHeadList<Email> emailAddresses) {
this.emailAddresses = emailAddresses;
}
}
|
type
|
java
|
quarkusio__quarkus
|
integration-tests/rest-client-reactive/src/test/java/io/quarkus/it/rest/client/selfsigned/SelfSignedServiceTestResource.java
|
{
"start": 428,
"end": 2280
}
|
class ____ implements QuarkusTestResourceLifecycleManager {
    Vertx vertx = Vertx.vertx();
    /**
     * Generates a self-signed certificate, starts an HTTPS server on an
     * ephemeral port, and returns the REST-client configuration pointing at
     * it (URL, trust store, trust store password).
     *
     * @return configuration properties for the "self-signed" REST client
     */
    @Override
    public Map<String, String> start() {
        File file = new File("target/certs");
        // fail fast instead of ignoring the mkdirs() result: certificate
        // generation below would fail confusingly without the directory
        if (!file.mkdirs() && !file.isDirectory()) {
            throw new IllegalStateException("Unable to create certificate directory: " + file);
        }
        // Generate self-signed certificate
        // We do not use the junit 5 plugin to avoid having to annotate all the tests to make sure the certs are
        // generated before the tests are run
        CertificateGenerator generator = new CertificateGenerator(file.toPath(), false);
        CertificateRequest cr = new CertificateRequest()
                .withName("self-signed")
                .withFormat(Format.PKCS12)
                .withPassword("changeit")
                .withDuration(Duration.ofDays(2))
                .withCN("localhost");
        try {
            generator.generate(cr);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        HttpServerOptions options = new HttpServerOptions()
                .setSsl(true)
                .setKeyCertOptions(new PfxOptions()
                        .setPath("target/certs/self-signed-keystore.p12")
                        .setPassword("changeit"));
        // BUGFIX: listen(-2) is not a valid port; port 0 asks the OS for an
        // ephemeral port, which is then read back through actualPort()
        var server = vertx.createHttpServer(options)
                .requestHandler(req -> req.response().end("OK"))
                .listen(0).toCompletionStage().toCompletableFuture().join();
        return Map.of(
                "quarkus.rest-client.self-signed.url", "https://localhost:" + server.actualPort() + "/",
                "quarkus.rest-client.self-signed.trust-store", "target/certs/self-signed-truststore.p12",
                "quarkus.rest-client.self-signed.trust-store-password", "changeit");
    }
    /** Closes the Vert.x instance (and with it the HTTPS server). */
    @Override
    public void stop() {
        vertx.close().toCompletionStage().toCompletableFuture().join();
    }
}
|
SelfSignedServiceTestResource
|
java
|
apache__dubbo
|
dubbo-spring-boot-project/dubbo-spring-boot-autoconfigure/src/main/java/org/apache/dubbo/spring/boot/autoconfigure/observability/zipkin/ZipkinConfigurations.java
|
{
"start": 7451,
"end": 8532
}
|
class ____ {
    /**
     * Creates the {@link ZipkinSpanExporter} bean. When a {@link Sender} bean
     * is available the exporter reuses it; otherwise the exporter is built
     * from the Dubbo tracing properties (endpoint and read timeout).
     *
     * @return the Zipkin span exporter
     */
    @Bean
    @ConditionalOnMissingBean
    ZipkinSpanExporter zipkinSpanExporter(
            DubboConfigurationProperties properties, BytesEncoder<Span> encoder, ObjectProvider<Sender> senders) {
        // Optional.orElse(null) replaces the previous AtomicReference +
        // ifPresent dance — same result, no intermediate holder object.
        Sender sender = senders.orderedStream().findFirst().orElse(null);
        if (sender == null) {
            ExporterConfig.ZipkinConfig zipkinConfig =
                    properties.getTracing().getTracingExporter().getZipkinConfig();
            return ZipkinSpanExporter.builder()
                    .setEncoder(encoder)
                    .setEndpoint(zipkinConfig.getEndpoint())
                    .setReadTimeout(zipkinConfig.getReadTimeout())
                    .build();
        }
        return ZipkinSpanExporter.builder()
                .setEncoder(encoder)
                .setSender(sender)
                .build();
    }
}
}
|
OpenTelemetryConfiguration
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ConsulEndpointBuilderFactory.java
|
{
"start": 38909,
"end": 48003
}
|
interface ____
extends
ConsulEndpointConsumerBuilder,
ConsulEndpointProducerBuilder {
default AdvancedConsulEndpointBuilder advanced() {
return (AdvancedConsulEndpointBuilder) this;
}
/**
* Connect timeout for OkHttpClient.
*
* The option is a: <code>java.time.Duration</code> type.
*
* Group: common
*
* @param connectTimeout the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder connectTimeout(java.time.Duration connectTimeout) {
doSetProperty("connectTimeout", connectTimeout);
return this;
}
/**
* Connect timeout for OkHttpClient.
*
* The option will be converted to a <code>java.time.Duration</code>
* type.
*
* Group: common
*
* @param connectTimeout the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder connectTimeout(String connectTimeout) {
doSetProperty("connectTimeout", connectTimeout);
return this;
}
/**
* The default key. Can be overridden by CamelConsulKey.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param key the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder key(String key) {
doSetProperty("key", key);
return this;
}
/**
* Configure if the AgentClient should attempt a ping before returning
* the Consul instance.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param pingInstance the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder pingInstance(boolean pingInstance) {
doSetProperty("pingInstance", pingInstance);
return this;
}
/**
* Configure if the AgentClient should attempt a ping before returning
* the Consul instance.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param pingInstance the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder pingInstance(String pingInstance) {
doSetProperty("pingInstance", pingInstance);
return this;
}
/**
* Read timeout for OkHttpClient.
*
* The option is a: <code>java.time.Duration</code> type.
*
* Group: common
*
* @param readTimeout the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder readTimeout(java.time.Duration readTimeout) {
doSetProperty("readTimeout", readTimeout);
return this;
}
/**
* Read timeout for OkHttpClient.
*
* The option will be converted to a <code>java.time.Duration</code>
* type.
*
* Group: common
*
* @param readTimeout the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder readTimeout(String readTimeout) {
doSetProperty("readTimeout", readTimeout);
return this;
}
/**
* Set tags. You can separate multiple tags by comma.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param tags the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder tags(String tags) {
doSetProperty("tags", tags);
return this;
}
/**
* The Consul agent URL.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param url the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder url(String url) {
doSetProperty("url", url);
return this;
}
/**
* Default to transform values retrieved from Consul i.e. on KV endpoint
* to string.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param valueAsString the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder valueAsString(boolean valueAsString) {
doSetProperty("valueAsString", valueAsString);
return this;
}
/**
* Default to transform values retrieved from Consul i.e. on KV endpoint
* to string.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param valueAsString the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder valueAsString(String valueAsString) {
doSetProperty("valueAsString", valueAsString);
return this;
}
/**
* Write timeout for OkHttpClient.
*
* The option is a: <code>java.time.Duration</code> type.
*
* Group: common
*
* @param writeTimeout the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder writeTimeout(java.time.Duration writeTimeout) {
doSetProperty("writeTimeout", writeTimeout);
return this;
}
/**
* Write timeout for OkHttpClient.
*
* The option will be converted to a <code>java.time.Duration</code>
* type.
*
* Group: common
*
* @param writeTimeout the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder writeTimeout(String writeTimeout) {
doSetProperty("writeTimeout", writeTimeout);
return this;
}
/**
* Sets the ACL token to be used with Consul.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param aclToken the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder aclToken(String aclToken) {
doSetProperty("aclToken", aclToken);
return this;
}
/**
* Sets the password to be used for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* SSL configuration using an
* org.apache.camel.support.jsse.SSLContextParameters instance.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder sslContextParameters(org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* SSL configuration using an
* org.apache.camel.support.jsse.SSLContextParameters instance.
*
* The option will be converted to a
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder sslContextParameters(String sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Sets the username to be used for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param userName the value to set
* @return the dsl builder
*/
default ConsulEndpointBuilder userName(String userName) {
doSetProperty("userName", userName);
return this;
}
}
/**
* Advanced builder for endpoint for the Consul component.
*/
public
|
ConsulEndpointBuilder
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/sample/ReferencingIdClassExampleEmployee.java
|
{
"start": 796,
"end": 1168
}
|
class ____ {
@Id private Long id;
@ManyToOne private IdClassExampleEmployee employee;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public IdClassExampleEmployee getEmployee() {
return employee;
}
public void setEmployee(IdClassExampleEmployee employee) {
this.employee = employee;
}
}
|
ReferencingIdClassExampleEmployee
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql-client/deployment/src/test/java/io/quarkus/smallrye/graphql/client/deployment/GraphQLClientEnvVarConfigTest.java
|
{
"start": 905,
"end": 1902
}
|
class ____ {
private static final String URL = "http://localhost:8080/graphql";
private static final String CONFIG_KEY = "key";
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyGraphQLClient.class, MyEnvSource.class)
.addAsServiceProvider(ConfigSource.class, MyEnvSource.class));
@Inject
GraphQLClientsConfig config; // runtime
@Inject
GraphQLClientBuildConfig buildConfig; // buildtime
@Test
void testConfigFromEnvVar() {
assertFalse(buildConfig.enableBuildTimeScanning());
GraphQLClientConfig client = config.clients().get(CONFIG_KEY);
assertNotNull(client);
assertEquals(URL,
client.url().orElseThrow(() -> new AssertException("URL not found in '%s' config".formatted(CONFIG_KEY))));
}
@GraphQLClientApi(configKey = CONFIG_KEY)
public
|
GraphQLClientEnvVarConfigTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/exceptions/Source.java
|
{
"start": 234,
"end": 558
}
|
class ____ {
private int size;
private String date;
public int getSize() {
return size;
}
public void setSize( int size ) {
this.size = size;
}
public String getDate() {
return date;
}
public void setDate( String date ) {
this.date = date;
}
}
|
Source
|
java
|
apache__camel
|
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/hpa/KubernetesHPAComponent.java
|
{
"start": 1087,
"end": 1355
}
|
class ____ extends AbstractKubernetesComponent {
@Override
protected KubernetesHPAEndpoint doCreateEndpoint(String uri, String remaining, KubernetesConfiguration config) {
return new KubernetesHPAEndpoint(uri, this, config);
}
}
|
KubernetesHPAComponent
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/internals/foreignkeyjoin/SubscriptionSendProcessorSupplierTest.java
|
{
"start": 31007,
"end": 31445
}
|
class ____ {
private final String foreignKey;
public LeftValue(final String value) {
this.foreignKey = value;
}
public String getForeignKey() {
return foreignKey;
}
}
@SuppressWarnings("resource")
private static long[] hash(final LeftValue value) {
return Murmur3.hash128(new LeftValueSerializer().serialize("value-serde-topic", value));
}
}
|
LeftValue
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/OSSDataBlocks.java
|
{
"start": 30659,
"end": 31958
}
|
class ____ extends BlockFactory {
private BlockFactory memoryFactory;
private BlockFactory diskFactory;
MemoryAndDiskBlockFactory(AliyunOSSFileSystem owner,
BlockFactory memoryFactory) {
super(owner);
this.memoryFactory = memoryFactory;
diskFactory = new DiskBlockFactory(owner);
long memoryLimit = owner.getConf().getLong(
Constants.FAST_UPLOAD_BUFFER_MEMORY_LIMIT,
Constants.FAST_UPLOAD_BUFFER_MEMORY_LIMIT_DEFAULT);
((MemoryBlockFactory)this.memoryFactory).setMemoryLimit(memoryLimit);
}
/**
* Create a temp file and a {@link DataBlock} instance to manage it.
*
* @param index block index
* @param limit limit of the block.
* @return the new block
* @throws IOException IO problems
*/
@Override
DataBlock create(long index, int limit,
BlockOutputStreamStatistics statistics)
throws IOException {
DataBlock block = memoryFactory.create(index, limit, statistics);
if (block != null) {
return block;
} else {
return diskFactory.create(index, limit, statistics);
}
}
@VisibleForTesting
MemoryBlockFactory getMemoryFactory() {
return (MemoryBlockFactory)memoryFactory;
}
}
}
|
MemoryAndDiskBlockFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java
|
{
"start": 1136,
"end": 11200
}
|
class ____ extends ESTestCase {
public void testAllTypesOfFields() {
ExtractedField docValue1 = new DocValueField("doc1", Collections.singleton("keyword"));
ExtractedField docValue2 = new DocValueField("doc2", Collections.singleton("ip"));
ExtractedField scriptField1 = new ScriptField("scripted1");
ExtractedField scriptField2 = new ScriptField("scripted2");
ExtractedField sourceField1 = new SourceField("src1", Collections.singleton("text"));
ExtractedField sourceField2 = new SourceField("src2", Collections.singleton("text"));
ExtractedFields extractedFields = new ExtractedFields(
Arrays.asList(docValue1, docValue2, scriptField1, scriptField2, sourceField1, sourceField2),
Collections.emptyList(),
Collections.emptyMap()
);
assertThat(extractedFields.getAllFields().size(), equalTo(6));
assertThat(
extractedFields.getDocValueFields().stream().map(ExtractedField::getName).toArray(String[]::new),
equalTo(new String[] { "doc1", "doc2" })
);
assertThat(extractedFields.getSourceFields(), equalTo(new String[] { "src1", "src2" }));
}
public void testBuildGivenMixtureOfTypes() {
Map<String, FieldCapabilities> timeCaps = new HashMap<>();
timeCaps.put("date", createFieldCaps(true));
Map<String, FieldCapabilities> valueCaps = new HashMap<>();
valueCaps.put("float", createFieldCaps(true));
valueCaps.put("keyword", createFieldCaps(true));
Map<String, FieldCapabilities> airlineCaps = new HashMap<>();
airlineCaps.put("text", createFieldCaps(false));
FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class);
when(fieldCapabilitiesResponse.getField("time")).thenReturn(timeCaps);
when(fieldCapabilitiesResponse.getField("value")).thenReturn(valueCaps);
when(fieldCapabilitiesResponse.getField("airline")).thenReturn(airlineCaps);
ExtractedFields extractedFields = ExtractedFields.build(
new TreeSet<>(Arrays.asList("time", "value", "airline", "airport")),
new HashSet<>(Collections.singletonList("airport")),
fieldCapabilitiesResponse,
Collections.emptyMap(),
Collections.emptyList()
);
assertThat(extractedFields.getDocValueFields().size(), equalTo(2));
assertThat(extractedFields.getDocValueFields().get(0).getName(), equalTo("time"));
assertThat(extractedFields.getDocValueFields().get(0).getDocValueFormat(), equalTo("epoch_millis"));
assertThat(extractedFields.getDocValueFields().get(1).getName(), equalTo("value"));
assertThat(extractedFields.getDocValueFields().get(1).getDocValueFormat(), equalTo(null));
assertThat(extractedFields.getSourceFields(), equalTo(new String[] { "airline" }));
assertThat(extractedFields.getAllFields().size(), equalTo(4));
}
public void testBuildGivenMultiFields() {
Map<String, FieldCapabilities> text = new HashMap<>();
text.put("text", createFieldCaps(false));
Map<String, FieldCapabilities> keyword = new HashMap<>();
keyword.put("keyword", createFieldCaps(true));
FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class);
when(fieldCapabilitiesResponse.getField("airline")).thenReturn(text);
when(fieldCapabilitiesResponse.getField("airline.text")).thenReturn(text);
when(fieldCapabilitiesResponse.getField("airport")).thenReturn(text);
when(fieldCapabilitiesResponse.getField("airport.keyword")).thenReturn(keyword);
ExtractedFields extractedFields = ExtractedFields.build(
new TreeSet<>(Arrays.asList("airline.text", "airport.keyword")),
Collections.emptySet(),
fieldCapabilitiesResponse,
Collections.emptyMap(),
Collections.emptyList()
);
assertThat(extractedFields.getDocValueFields().size(), equalTo(1));
assertThat(extractedFields.getDocValueFields().get(0).getName(), equalTo("airport.keyword"));
assertThat(extractedFields.getSourceFields().length, equalTo(1));
assertThat(extractedFields.getSourceFields()[0], equalTo("airline"));
assertThat(extractedFields.getAllFields().size(), equalTo(2));
ExtractedField airlineField = extractedFields.getAllFields().get(0);
assertThat(airlineField.isMultiField(), is(true));
assertThat(airlineField.getName(), equalTo("airline.text"));
assertThat(airlineField.getSearchField(), equalTo("airline"));
assertThat(airlineField.getParentField(), equalTo("airline"));
ExtractedField airportField = extractedFields.getAllFields().get(1);
assertThat(airportField.isMultiField(), is(true));
assertThat(airportField.getName(), equalTo("airport.keyword"));
assertThat(airportField.getSearchField(), equalTo("airport.keyword"));
assertThat(airportField.getParentField(), equalTo("airport"));
}
public void testApplyBooleanMapping_GivenDocValueField() {
DocValueField aBool = new DocValueField("a_bool", Collections.singleton("boolean"));
ExtractedField mapped = ExtractedFields.applyBooleanMapping(aBool);
SearchHit hitTrue = new SearchHitBuilder(42).addField("a_bool", true).build();
SearchHit hitFalse = new SearchHitBuilder(42).addField("a_bool", false).build();
assertThat(mapped.value(hitTrue, new SourceSupplier(hitTrue)), equalTo(new Integer[] { 1 }));
assertThat(mapped.value(hitFalse, new SourceSupplier(hitFalse)), equalTo(new Integer[] { 0 }));
assertThat(mapped.getName(), equalTo(aBool.getName()));
assertThat(mapped.getMethod(), equalTo(aBool.getMethod()));
assertThat(mapped.supportsFromSource(), is(aBool.supportsFromSource()));
}
public void testApplyBooleanMapping_GivenSourceField() {
SourceField aBool = new SourceField("a_bool", Collections.singleton("boolean"));
ExtractedField mapped = ExtractedFields.applyBooleanMapping(aBool);
SearchHit hitTrue = new SearchHitBuilder(42).setSource("{\"a_bool\": true}").build();
SearchHit hitFalse = new SearchHitBuilder(42).setSource("{\"a_bool\": false}").build();
SearchHit hitTrueArray = new SearchHitBuilder(42).setSource("{\"a_bool\": [\"true\", true]}").build();
SearchHit hitFalseArray = new SearchHitBuilder(42).setSource("{\"a_bool\": [\"false\", false]}").build();
assertThat(mapped.value(hitTrue, new SourceSupplier(hitTrue)), equalTo(new Integer[] { 1 }));
assertThat(mapped.value(hitFalse, new SourceSupplier(hitFalse)), equalTo(new Integer[] { 0 }));
assertThat(mapped.value(hitTrueArray, new SourceSupplier(hitTrueArray)), equalTo(new Integer[] { 1, 1 }));
assertThat(mapped.value(hitFalseArray, new SourceSupplier(hitFalseArray)), equalTo(new Integer[] { 0, 0 }));
assertThat(mapped.getName(), equalTo(aBool.getName()));
assertThat(mapped.getMethod(), equalTo(aBool.getMethod()));
assertThat(mapped.supportsFromSource(), is(aBool.supportsFromSource()));
}
public void testApplyBooleanMapping_GivenNonBooleanField() {
SourceField aBool = new SourceField("not_a_bool", Collections.singleton("integer"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ExtractedFields.applyBooleanMapping(aBool));
assertThat(e.getMessage(), equalTo("cannot apply boolean mapping to field [not_a_bool]"));
}
public void testBuildGivenFieldWithoutMappings() {
FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class);
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> ExtractedFields.build(
Collections.singleton("value"),
Collections.emptySet(),
fieldCapabilitiesResponse,
Collections.emptyMap(),
Collections.emptyList()
)
);
assertThat(e.getMessage(), equalTo("cannot retrieve field [value] because it has no mappings"));
}
public void testExtractFeatureOrganicAndProcessedNames() {
ExtractedField docValue1 = new DocValueField("doc1", Collections.singleton("keyword"));
ExtractedField docValue2 = new DocValueField("doc2", Collections.singleton("ip"));
ExtractedField scriptField1 = new ScriptField("scripted1");
ExtractedField scriptField2 = new ScriptField("scripted2");
ExtractedField sourceField1 = new SourceField("src1", Collections.singleton("text"));
ExtractedField sourceField2 = new SourceField("src2", Collections.singleton("text"));
Map<String, String> hotMap = new LinkedHashMap<>();
hotMap.put("bar", "bar_column");
hotMap.put("foo", "foo_column");
ExtractedFields extractedFields = new ExtractedFields(
Arrays.asList(docValue1, docValue2, scriptField1, scriptField2, sourceField1, sourceField2),
Arrays.asList(
new ProcessedField(new NGram("doc1", "f", new int[] { 1, 2 }, 0, 2, true)),
new ProcessedField(new OneHotEncoding("src1", hotMap, true))
),
Collections.emptyMap()
);
String[] organic = extractedFields.extractOrganicFeatureNames();
assertThat(organic, arrayContaining("doc2", "scripted1", "scripted2", "src2"));
String[] processed = extractedFields.extractProcessedFeatureNames();
assertThat(processed, arrayContaining("f.10", "f.11", "f.20", "bar_column", "foo_column"));
}
private static FieldCapabilities createFieldCaps(boolean isAggregatable) {
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
when(fieldCaps.isAggregatable()).thenReturn(isAggregatable);
return fieldCaps;
}
}
|
ExtractedFieldsTests
|
java
|
apache__spark
|
examples/src/main/java/org/apache/spark/examples/mllib/JavaMulticlassClassificationMetricsExample.java
|
{
"start": 1364,
"end": 1513
}
|
class ____ {
public static void main(String[] args) {
SparkConf conf = new SparkConf().setAppName("Multi
|
JavaMulticlassClassificationMetricsExample
|
java
|
apache__avro
|
lang/java/protobuf/src/test/java/org/apache/avro/protobuf/noopt/Test.java
|
{
"start": 111561,
"end": 112909
}
|
class ____ extends com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.noopt.M)
MOrBuilder {
private static final long serialVersionUID = 0L;
static {
com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, /* major= */ 4, /* minor= */ 26, /* patch= */ 1,
/* suffix= */ "", M.class.getName());
}
// Use M.newBuilder() to construct.
private M(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
}
private M() {
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.noopt.Test.M.class,
org.apache.avro.protobuf.noopt.Test.M.Builder.class);
}
/**
* Protobuf enum {@code org.apache.avro.protobuf.noopt.M.N}
*/
public
|
M
|
java
|
apache__flink
|
flink-connectors/flink-connector-base/src/test/java/org/apache/flink/connector/base/source/reader/fetcher/SplitFetcherPauseResumeSplitReaderTest.java
|
{
"start": 7233,
"end": 7970
}
|
class ____<E, SplitT extends SourceSplit>
extends SingleThreadFetcherManager<E, SplitT> {
public MockSteppingSplitFetcherManager(
Supplier<SplitReader<E, SplitT>> splitReaderSupplier, Configuration configuration) {
super(splitReaderSupplier, configuration);
}
@Override
public void addSplits(List<SplitT> splitsToAdd) {
SplitFetcher<E, SplitT> fetcher = createSplitFetcher();
fetcher.addSplits(splitsToAdd);
}
public void runEachOnce() {
for (SplitFetcher<E, SplitT> fetcher : fetchers.values()) {
fetcher.runOnce();
}
}
}
private static
|
MockSteppingSplitFetcherManager
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/XTrimArgs.java
|
{
"start": 1280,
"end": 1617
}
|
class ____ implements CompositeArgument {
private Long maxlen;
private boolean approximateTrimming;
private boolean exactTrimming;
private String minId;
private Long limit;
private StreamDeletionPolicy trimmingMode;
/**
* Builder entry points for {@link XTrimArgs}.
*/
public static
|
XTrimArgs
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.