language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/core/Completable.java
|
{
"start": 110375,
"end": 110497
}
|
interface ____
* // returns the custom consumer type from above in its apply() method.
* // Such
|
and
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableSemantics.java
|
{
"start": 1880,
"end": 2341
}
|
class ____ extends ProcessTableFunction<String> {
* public void eval(Context ctx, @ArgumentHint(value = ArgumentTrait.SET_SEMANTIC_TABLE, type = "ROW < s STRING >") Row t) {
* TableSemantics semantics = ctx.tableSemanticsFor("t");
* // Always returns "ROW < s STRING >"
* semantics.dataType();
* ...
* }
* }
*
* // Function with explicit table argument type of structured type "Customer"
*
|
MyPTF
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/ClassUtils.java
|
{
"start": 61851,
"end": 61910
}
|
class ____ static nested class.
*
* @param cls the
|
or
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java
|
{
"start": 1472,
"end": 5422
}
|
class ____ extends AggregatorTestCase {
private static final String KEYWORD_FIELD = "keyword";
private static final List<String> dataset;
static {
List<String> d = new ArrayList<>(45);
for (int i = 0; i < 10; i++) {
for (int j = 0; j < i; j++) {
d.add(String.valueOf(i));
}
}
dataset = d;
}
public void testMatchNoDocs() throws IOException {
testSearchCase(
new MatchNoDocsQuery(),
dataset,
aggregation -> aggregation.field(KEYWORD_FIELD),
agg -> assertEquals(0, agg.getBuckets().size()),
null // without type hint
);
testSearchCase(
new MatchNoDocsQuery(),
dataset,
aggregation -> aggregation.field(KEYWORD_FIELD),
agg -> assertEquals(0, agg.getBuckets().size()),
ValueType.STRING // with type hint
);
}
public void testMatchAllDocs() throws IOException {
Query query = new MatchAllDocsQuery();
testSearchCase(query, dataset, aggregation -> aggregation.field(KEYWORD_FIELD), agg -> {
assertEquals(9, agg.getBuckets().size());
for (int i = 0; i < 9; i++) {
StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i);
assertThat(bucket.getKey(), equalTo(String.valueOf(9L - i)));
assertThat(bucket.getDocCount(), equalTo(9L - i));
}
},
null // without type hint
);
testSearchCase(query, dataset, aggregation -> aggregation.field(KEYWORD_FIELD), agg -> {
assertEquals(9, agg.getBuckets().size());
for (int i = 0; i < 9; i++) {
StringTerms.Bucket bucket = (StringTerms.Bucket) agg.getBuckets().get(i);
assertThat(bucket.getKey(), equalTo(String.valueOf(9L - i)));
assertThat(bucket.getDocCount(), equalTo(9L - i));
}
},
ValueType.STRING // with type hint
);
}
private void testSearchCase(
Query query,
List<String> dataset,
Consumer<TermsAggregationBuilder> configure,
Consumer<InternalMappedTerms<?, ?>> verify,
ValueType valueType
) throws IOException {
boolean indexed = randomBoolean();
MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType(KEYWORD_FIELD, indexed, true, Collections.emptyMap());
FieldType luceneFieldType = new FieldType(KeywordFieldMapper.Defaults.FIELD_TYPE);
if (indexed == false) {
luceneFieldType.setIndexOptions(IndexOptions.NONE);
}
luceneFieldType.freeze();
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Document document = new Document();
for (String value : dataset) {
document.add(new Field(KEYWORD_FIELD, new BytesRef(value), luceneFieldType));
indexWriter.addDocument(document);
document.clear();
}
}
try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name");
if (valueType != null) {
aggregationBuilder.userValueTypeHint(valueType);
}
if (configure != null) {
configure.accept(aggregationBuilder);
}
InternalMappedTerms<?, ?> rareTerms = searchAndReduce(
indexReader,
new AggTestConfig(aggregationBuilder, keywordFieldType).withQuery(query)
);
verify.accept(rareTerms);
}
}
}
}
|
KeywordTermsAggregatorTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/DurationTemporalUnitTest.java
|
{
"start": 876,
"end": 1271
}
|
class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(DurationTemporalUnit.class, getClass());
@Test
public void durationOf_good() {
helper
.addSourceLines(
"TestClass.java",
"""
import java.time.Duration;
import java.time.temporal.ChronoUnit;
public
|
DurationTemporalUnitTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
|
{
"start": 1945,
"end": 39309
}
|
class ____ extends ESTestCase {
public void testSimpleJsonFromAndTo() throws IOException {
IndexMetadata idx1 = createFirstBackingIndex("data-stream1").build();
IndexMetadata idx2 = createFirstBackingIndex("data-stream2").build();
ReservedStateHandlerMetadata hmOne = new ReservedStateHandlerMetadata("one", Set.of("a", "b"));
ReservedStateHandlerMetadata hmTwo = new ReservedStateHandlerMetadata("two", Set.of("c", "d"));
ReservedStateErrorMetadata emOne = new ReservedStateErrorMetadata(
1L,
ReservedStateErrorMetadata.ErrorKind.VALIDATION,
List.of("Test error 1", "Test error 2")
);
ReservedStateMetadata reservedStateMetadata = ReservedStateMetadata.builder("namespace_one")
.errorMetadata(emOne)
.putHandler(hmOne)
.putHandler(hmTwo)
.build();
ReservedStateMetadata reservedStateMetadata1 = ReservedStateMetadata.builder("namespace_two").putHandler(hmTwo).build();
ProjectMetadata project = ProjectMetadata.builder(ProjectId.DEFAULT)
.put(
IndexTemplateMetadata.builder("foo")
.patterns(Collections.singletonList("bar"))
.order(1)
.settings(Settings.builder().put("setting1", "value1").put("setting2", "value2"))
.putAlias(newAliasMetadataBuilder("alias-bar1"))
.putAlias(newAliasMetadataBuilder("alias-bar2").filter("{\"term\":{\"user\":\"kimchy\"}}"))
.putAlias(newAliasMetadataBuilder("alias-bar3").routing("routing-bar"))
)
.put(
"component_template",
new ComponentTemplate(
new Template(
Settings.builder().put("setting", "value").build(),
new CompressedXContent("{\"baz\":\"eggplant\"}"),
Collections.singletonMap("alias", AliasMetadata.builder("alias").build())
),
5L,
Collections.singletonMap("my_meta", Collections.singletonMap("foo", "bar"))
)
)
.put(
"index_templatev2",
ComposableIndexTemplate.builder()
.indexPatterns(Arrays.asList("foo", "bar*"))
.template(
new Template(
Settings.builder().put("setting", "value").build(),
new CompressedXContent("{\"baz\":\"eggplant\"}"),
Collections.singletonMap("alias", AliasMetadata.builder("alias").build())
)
)
.componentTemplates(Collections.singletonList("component_template"))
.priority(5L)
.version(4L)
.metadata(Collections.singletonMap("my_meta", Collections.singletonMap("potato", "chicken")))
.dataStreamTemplate(randomBoolean() ? null : new ComposableIndexTemplate.DataStreamTemplate())
.build()
)
.put(
IndexMetadata.builder("test12")
.settings(settings(IndexVersion.current()).put("setting1", "value1").put("setting2", "value2"))
.creationDate(2L)
.numberOfShards(1)
.numberOfReplicas(2)
.putMapping(MAPPING_SOURCE1)
.putAlias(newAliasMetadataBuilder("alias1").filter(ALIAS_FILTER1))
.putAlias(newAliasMetadataBuilder("alias3").writeIndex(randomBoolean() ? null : randomBoolean()))
.putAlias(newAliasMetadataBuilder("alias4").filter(ALIAS_FILTER2))
)
.put(
IndexTemplateMetadata.builder("foo")
.patterns(Collections.singletonList("bar"))
.order(1)
.settings(Settings.builder().put("setting1", "value1").put("setting2", "value2"))
.putAlias(newAliasMetadataBuilder("alias-bar1"))
.putAlias(newAliasMetadataBuilder("alias-bar2").filter("{\"term\":{\"user\":\"kimchy\"}}"))
.putAlias(newAliasMetadataBuilder("alias-bar3").routing("routing-bar"))
)
.put(idx1, false)
.put(idx2, false)
.put(DataStreamTestHelper.newInstance("data-stream1", List.of(idx1.getIndex())))
.put(DataStreamTestHelper.newInstance("data-stream2", List.of(idx2.getIndex())))
.build();
XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(Metadata.builder().put(project).put(reservedStateMetadata).put(reservedStateMetadata1).build())
.toXContent(
builder,
new ToXContent.MapParams(Map.of("binary", "true", Metadata.CONTEXT_MODE_PARAM, Metadata.CONTEXT_MODE_GATEWAY))
);
builder.endObject();
Metadata parsedMetadata;
try (var parser = createParser(builder)) {
parsedMetadata = Metadata.Builder.fromXContent(parser);
}
// templates
final var parsedProject = parsedMetadata.getProject(ProjectId.DEFAULT);
assertThat(parsedProject.templates().get("foo").name(), is("foo"));
assertThat(parsedProject.templates().get("foo").patterns(), is(Collections.singletonList("bar")));
assertThat(parsedProject.templates().get("foo").settings().get("index.setting1"), is("value1"));
assertThat(parsedProject.templates().get("foo").settings().getByPrefix("index.").get("setting2"), is("value2"));
assertThat(parsedProject.templates().get("foo").aliases().size(), equalTo(3));
assertThat(parsedProject.templates().get("foo").aliases().get("alias-bar1").alias(), equalTo("alias-bar1"));
assertThat(parsedProject.templates().get("foo").aliases().get("alias-bar2").alias(), equalTo("alias-bar2"));
assertThat(
parsedProject.templates().get("foo").aliases().get("alias-bar2").filter().string(),
equalTo("{\"term\":{\"user\":\"kimchy\"}}")
);
assertThat(parsedProject.templates().get("foo").aliases().get("alias-bar3").alias(), equalTo("alias-bar3"));
assertThat(parsedProject.templates().get("foo").aliases().get("alias-bar3").indexRouting(), equalTo("routing-bar"));
assertThat(parsedProject.templates().get("foo").aliases().get("alias-bar3").searchRouting(), equalTo("routing-bar"));
// component template
assertNotNull(parsedProject.componentTemplates().get("component_template"));
assertThat(parsedProject.componentTemplates().get("component_template").version(), is(5L));
assertThat(
parsedProject.componentTemplates().get("component_template").metadata(),
equalTo(Collections.singletonMap("my_meta", Collections.singletonMap("foo", "bar")))
);
assertThat(
parsedProject.componentTemplates().get("component_template").template(),
equalTo(
new Template(
Settings.builder().put("setting", "value").build(),
new CompressedXContent("{\"baz\":\"eggplant\"}"),
Collections.singletonMap("alias", AliasMetadata.builder("alias").build())
)
)
);
// index template v2
assertNotNull(parsedProject.templatesV2().get("index_templatev2"));
assertThat(parsedProject.templatesV2().get("index_templatev2").priority(), is(5L));
assertThat(parsedProject.templatesV2().get("index_templatev2").version(), is(4L));
assertThat(parsedProject.templatesV2().get("index_templatev2").indexPatterns(), is(Arrays.asList("foo", "bar*")));
assertThat(parsedProject.templatesV2().get("index_templatev2").composedOf(), is(Collections.singletonList("component_template")));
assertThat(
parsedProject.templatesV2().get("index_templatev2").metadata(),
equalTo(Collections.singletonMap("my_meta", Collections.singletonMap("potato", "chicken")))
);
assertThat(
parsedProject.templatesV2().get("index_templatev2").template(),
equalTo(
new Template(
Settings.builder().put("setting", "value").build(),
new CompressedXContent("{\"baz\":\"eggplant\"}"),
Collections.singletonMap("alias", AliasMetadata.builder("alias").build())
)
)
);
// data streams
assertNotNull(parsedProject.dataStreams().get("data-stream1"));
assertThat(parsedProject.dataStreams().get("data-stream1").getName(), is("data-stream1"));
assertThat(parsedProject.dataStreams().get("data-stream1").getIndices(), contains(idx1.getIndex()));
assertNotNull(parsedProject.dataStreams().get("data-stream2"));
assertThat(parsedProject.dataStreams().get("data-stream2").getName(), is("data-stream2"));
assertThat(parsedProject.dataStreams().get("data-stream2").getIndices(), contains(idx2.getIndex()));
// reserved 'operator' metadata
assertEquals(reservedStateMetadata, parsedMetadata.reservedStateMetadata().get(reservedStateMetadata.namespace()));
assertEquals(reservedStateMetadata1, parsedMetadata.reservedStateMetadata().get(reservedStateMetadata1.namespace()));
}
private static final String MAPPING_SOURCE1 = """
{"mapping1":{"text1":{"type":"string"}}}""";
private static final String MAPPING_SOURCE2 = """
{"mapping2":{"text2":{"type":"string"}}}""";
private static final String ALIAS_FILTER1 = "{\"field1\":\"value1\"}";
private static final String ALIAS_FILTER2 = "{\"field2\":\"value2\"}";
public void testToXContentGateway_MultiProject() throws IOException {
Map<String, String> mapParams = Map.of(
Metadata.CONTEXT_MODE_PARAM,
CONTEXT_MODE_GATEWAY,
"flat_settings",
"true",
"multi-project",
"true"
);
Metadata metadata = buildMetadata();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"meta-data" : {
"version" : 0,
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 1,
"last_committed_config" : [
"commitedConfigurationNodeId"
],
"last_accepted_config" : [
"acceptedConfigurationNodeId"
],
"voting_config_exclusions" : [
{
"node_id" : "exlucdedNodeId",
"node_name" : "excludedNodeName"
}
]
},
"settings" : {
"index.version.created" : "%s"
},
"projects" : [
{
"id" : "default",
"templates" : {
"template" : {
"order" : 0,
"index_patterns" : [
"pattern1",
"pattern2"
],
"settings" : {
"index.version.created" : "%s"
},
"mappings" : {
"key1" : { }
},
"aliases" : { }
}
},
"index-graveyard" : {
"tombstones" : [ ]
}
}
],
"reserved_state" : { }
}
}""", IndexVersion.current(), IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
public void testToXContentGateway_FlatSettingTrue_ReduceMappingFalse() throws IOException {
Map<String, String> mapParams = Map.of(
Metadata.CONTEXT_MODE_PARAM,
CONTEXT_MODE_GATEWAY,
"flat_settings",
"true",
"reduce_mappings",
"false"
);
Metadata metadata = buildMetadata();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"meta-data" : {
"version" : 0,
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 1,
"last_committed_config" : [
"commitedConfigurationNodeId"
],
"last_accepted_config" : [
"acceptedConfigurationNodeId"
],
"voting_config_exclusions" : [
{
"node_id" : "exlucdedNodeId",
"node_name" : "excludedNodeName"
}
]
},
"settings" : {
"index.version.created" : "%s"
},
"templates" : {
"template" : {
"order" : 0,
"index_patterns" : [
"pattern1",
"pattern2"
],
"settings" : {
"index.version.created" : "%s"
},
"mappings" : {
"key1" : { }
},
"aliases" : { }
}
},
"index-graveyard" : {
"tombstones" : [ ]
},
"reserved_state" : { }
}
}""", IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
public void testToXContentAPI_SameTypeName() throws IOException {
Map<String, String> mapParams = Map.of(Metadata.CONTEXT_MODE_PARAM, CONTEXT_MODE_API);
Metadata metadata = Metadata.builder()
.clusterUUID("clusterUUID")
.coordinationMetadata(CoordinationMetadata.builder().build())
.put(
ProjectMetadata.builder(ProjectId.DEFAULT)
.put(
IndexMetadata.builder("index")
.state(IndexMetadata.State.OPEN)
.settings(Settings.builder().put(SETTING_VERSION_CREATED, IndexVersion.current()))
.putMapping(
new MappingMetadata(
"type",
// the type name is the root value,
// the original logic in ClusterState.toXContent will reduce
Map.of("type", Map.of("key", "value"))
)
)
.numberOfShards(1)
.primaryTerm(0, 1L)
.numberOfReplicas(2)
)
)
.build();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"metadata" : {
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 0,
"last_committed_config" : [ ],
"last_accepted_config" : [ ],
"voting_config_exclusions" : [ ]
},
"templates" : { },
"indices" : {
"index" : {
"version" : 2,
"transport_version" : "0",
"mapping_version" : 1,
"settings_version" : 1,
"aliases_version" : 1,
"routing_num_shards" : 1,
"state" : "open",
"settings" : {
"index" : {
"number_of_shards" : "1",
"number_of_replicas" : "2",
"version" : {
"created" : "%s"
}
}
},
"mappings" : {
"type" : {
"key" : "value"
}
},
"aliases" : [ ],
"primary_terms" : {
"0" : 1
},
"in_sync_allocations" : {
"0" : [ ]
},
"rollover_info" : { },
"mappings_updated_version" : %s,
"system" : false,
"timestamp_range" : {
"shards" : [ ]
},
"event_ingested_range" : {
"shards" : [ ]
}
}
},
"index-graveyard" : {
"tombstones" : [ ]
},
"reserved_state" : { }
}
}""", IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
public void testToXContentGateway_FlatSettingFalse_ReduceMappingTrue() throws IOException {
Map<String, String> mapParams = Map.of(
Metadata.CONTEXT_MODE_PARAM,
CONTEXT_MODE_GATEWAY,
"flat_settings",
"false",
"reduce_mappings",
"true"
);
Metadata metadata = buildMetadata();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"meta-data" : {
"version" : 0,
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 1,
"last_committed_config" : [
"commitedConfigurationNodeId"
],
"last_accepted_config" : [
"acceptedConfigurationNodeId"
],
"voting_config_exclusions" : [
{
"node_id" : "exlucdedNodeId",
"node_name" : "excludedNodeName"
}
]
},
"settings" : {
"index.version.created" : "%s"
},
"templates" : {
"template" : {
"order" : 0,
"index_patterns" : [
"pattern1",
"pattern2"
],
"settings" : {
"index" : {
"version" : {
"created" : "%s"
}
}
},
"mappings" : { },
"aliases" : { }
}
},
"index-graveyard" : {
"tombstones" : [ ]
},
"reserved_state" : { }
}
}""", IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
public void testToXContentAPI_FlatSettingTrue_ReduceMappingFalse() throws IOException {
Map<String, String> mapParams = Map.of(
Metadata.CONTEXT_MODE_PARAM,
CONTEXT_MODE_API,
"flat_settings",
"true",
"reduce_mappings",
"false"
);
final Metadata metadata = buildMetadata();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"metadata" : {
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 1,
"last_committed_config" : [
"commitedConfigurationNodeId"
],
"last_accepted_config" : [
"acceptedConfigurationNodeId"
],
"voting_config_exclusions" : [
{
"node_id" : "exlucdedNodeId",
"node_name" : "excludedNodeName"
}
]
},
"templates" : {
"template" : {
"order" : 0,
"index_patterns" : [
"pattern1",
"pattern2"
],
"settings" : {
"index.version.created" : "%s"
},
"mappings" : {
"key1" : { }
},
"aliases" : { }
}
},
"indices" : {
"index" : {
"version" : 2,
"transport_version" : "0",
"mapping_version" : 1,
"settings_version" : 1,
"aliases_version" : 1,
"routing_num_shards" : 1,
"state" : "open",
"settings" : {
"index.number_of_replicas" : "2",
"index.number_of_shards" : "1",
"index.version.created" : "%s"
},
"mappings" : {
"type" : {
"type1" : {
"key" : "value"
}
}
},
"aliases" : [
"alias"
],
"primary_terms" : {
"0" : 1
},
"in_sync_allocations" : {
"0" : [
"allocationId"
]
},
"rollover_info" : {
"rolloveAlias" : {
"met_conditions" : { },
"time" : 1
}
},
"mappings_updated_version" : %s,
"system" : false,
"timestamp_range" : {
"shards" : [ ]
},
"event_ingested_range" : {
"shards" : [ ]
}
}
},
"index-graveyard" : {
"tombstones" : [ ]
},
"reserved_state" : { }
}
}""", IndexVersion.current(), IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
public void testToXContentAPI_FlatSettingFalse_ReduceMappingTrue() throws IOException {
Map<String, String> mapParams = Map.of(
Metadata.CONTEXT_MODE_PARAM,
CONTEXT_MODE_API,
"flat_settings",
"false",
"reduce_mappings",
"true"
);
final Metadata metadata = buildMetadata();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"metadata" : {
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 1,
"last_committed_config" : [
"commitedConfigurationNodeId"
],
"last_accepted_config" : [
"acceptedConfigurationNodeId"
],
"voting_config_exclusions" : [
{
"node_id" : "exlucdedNodeId",
"node_name" : "excludedNodeName"
}
]
},
"templates" : {
"template" : {
"order" : 0,
"index_patterns" : [
"pattern1",
"pattern2"
],
"settings" : {
"index" : {
"version" : {
"created" : "%s"
}
}
},
"mappings" : { },
"aliases" : { }
}
},
"indices" : {
"index" : {
"version" : 2,
"transport_version" : "0",
"mapping_version" : 1,
"settings_version" : 1,
"aliases_version" : 1,
"routing_num_shards" : 1,
"state" : "open",
"settings" : {
"index" : {
"number_of_shards" : "1",
"number_of_replicas" : "2",
"version" : {
"created" : "%s"
}
}
},
"mappings" : {
"type" : {
"type1" : {
"key" : "value"
}
}
},
"aliases" : [
"alias"
],
"primary_terms" : {
"0" : 1
},
"in_sync_allocations" : {
"0" : [
"allocationId"
]
},
"rollover_info" : {
"rolloveAlias" : {
"met_conditions" : { },
"time" : 1
}
},
"mappings_updated_version" : %s,
"system" : false,
"timestamp_range" : {
"shards" : [ ]
},
"event_ingested_range" : {
"shards" : [ ]
}
}
},
"index-graveyard" : {
"tombstones" : [ ]
},
"reserved_state" : { }
}
}""", IndexVersion.current(), IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
public void testToXContentAPIReservedMetadata() throws IOException {
Map<String, String> mapParams = Map.of(
Metadata.CONTEXT_MODE_PARAM,
CONTEXT_MODE_API,
"flat_settings",
"false",
"reduce_mappings",
"true"
);
Metadata metadata = buildMetadata();
ReservedStateHandlerMetadata hmOne = new ReservedStateHandlerMetadata("one", Set.of("a", "b"));
ReservedStateHandlerMetadata hmTwo = new ReservedStateHandlerMetadata("two", Set.of("c", "d"));
ReservedStateHandlerMetadata hmThree = new ReservedStateHandlerMetadata("three", Set.of("e", "f"));
ReservedStateErrorMetadata emOne = new ReservedStateErrorMetadata(
1L,
ReservedStateErrorMetadata.ErrorKind.VALIDATION,
List.of("Test error 1", "Test error 2")
);
ReservedStateErrorMetadata emTwo = new ReservedStateErrorMetadata(
2L,
ReservedStateErrorMetadata.ErrorKind.TRANSIENT,
List.of("Test error 3", "Test error 4")
);
ReservedStateMetadata omOne = ReservedStateMetadata.builder("namespace_one")
.errorMetadata(emOne)
.putHandler(hmOne)
.putHandler(hmTwo)
.build();
ReservedStateMetadata omTwo = ReservedStateMetadata.builder("namespace_two").errorMetadata(emTwo).putHandler(hmThree).build();
metadata = Metadata.builder(metadata).put(omOne).put(omTwo).build();
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
builder.endObject();
assertEquals(Strings.format("""
{
"metadata" : {
"cluster_uuid" : "clusterUUID",
"cluster_uuid_committed" : false,
"cluster_coordination" : {
"term" : 1,
"last_committed_config" : [
"commitedConfigurationNodeId"
],
"last_accepted_config" : [
"acceptedConfigurationNodeId"
],
"voting_config_exclusions" : [
{
"node_id" : "exlucdedNodeId",
"node_name" : "excludedNodeName"
}
]
},
"templates" : {
"template" : {
"order" : 0,
"index_patterns" : [
"pattern1",
"pattern2"
],
"settings" : {
"index" : {
"version" : {
"created" : "%s"
}
}
},
"mappings" : { },
"aliases" : { }
}
},
"indices" : {
"index" : {
"version" : 2,
"transport_version" : "0",
"mapping_version" : 1,
"settings_version" : 1,
"aliases_version" : 1,
"routing_num_shards" : 1,
"state" : "open",
"settings" : {
"index" : {
"number_of_shards" : "1",
"number_of_replicas" : "2",
"version" : {
"created" : "%s"
}
}
},
"mappings" : {
"type" : {
"type1" : {
"key" : "value"
}
}
},
"aliases" : [
"alias"
],
"primary_terms" : {
"0" : 1
},
"in_sync_allocations" : {
"0" : [
"allocationId"
]
},
"rollover_info" : {
"rolloveAlias" : {
"met_conditions" : { },
"time" : 1
}
},
"mappings_updated_version" : %s,
"system" : false,
"timestamp_range" : {
"shards" : [ ]
},
"event_ingested_range" : {
"shards" : [ ]
}
}
},
"index-graveyard" : {
"tombstones" : [ ]
},
"reserved_state" : {
"namespace_one" : {
"version" : -9223372036854775808,
"handlers" : {
"one" : {
"keys" : [
"a",
"b"
]
},
"two" : {
"keys" : [
"c",
"d"
]
}
},
"errors" : {
"version" : 1,
"error_kind" : "validation",
"errors" : [
"Test error 1",
"Test error 2"
]
}
},
"namespace_two" : {
"version" : -9223372036854775808,
"handlers" : {
"three" : {
"keys" : [
"e",
"f"
]
}
},
"errors" : {
"version" : 2,
"error_kind" : "transient",
"errors" : [
"Test error 3",
"Test error 4"
]
}
}
}
}
}""", IndexVersion.current(), IndexVersion.current(), IndexVersion.current()), Strings.toString(builder));
}
private Metadata buildMetadata() throws IOException {
return Metadata.builder()
.clusterUUID("clusterUUID")
.coordinationMetadata(
CoordinationMetadata.builder()
.term(1)
.lastCommittedConfiguration(new CoordinationMetadata.VotingConfiguration(Set.of("commitedConfigurationNodeId")))
.lastAcceptedConfiguration(new CoordinationMetadata.VotingConfiguration(Set.of("acceptedConfigurationNodeId")))
.addVotingConfigExclusion(new CoordinationMetadata.VotingConfigExclusion("exlucdedNodeId", "excludedNodeName"))
.build()
)
.persistentSettings(Settings.builder().put(SETTING_VERSION_CREATED, IndexVersion.current()).build())
.transientSettings(Settings.builder().put(SETTING_VERSION_CREATED, IndexVersion.current()).build())
.put(
ProjectMetadata.builder(ProjectId.DEFAULT)
.put(
IndexMetadata.builder("index")
.state(IndexMetadata.State.OPEN)
.settings(Settings.builder().put(SETTING_VERSION_CREATED, IndexVersion.current()))
.putMapping(new MappingMetadata("type", Map.of("type1", Map.of("key", "value"))))
.putAlias(AliasMetadata.builder("alias").indexRouting("indexRouting").build())
.numberOfShards(1)
.primaryTerm(0, 1L)
.putInSyncAllocationIds(0, Set.of("allocationId"))
.numberOfReplicas(2)
.putRolloverInfo(new RolloverInfo("rolloveAlias", List.of(), 1L))
)
.put(
IndexTemplateMetadata.builder("template")
.patterns(List.of("pattern1", "pattern2"))
.order(0)
.settings(Settings.builder().put(SETTING_VERSION_CREATED, IndexVersion.current()))
.putMapping("type", "{ \"key1\": {} }")
.build()
)
)
.build();
}
public static
|
ToAndFromJsonMetadataTests
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilder.java
|
{
"start": 4678,
"end": 6046
}
|
class ____ {
/**
* Well-known name for the {@link CompositeUriComponentsContributor} object in the bean factory.
*/
public static final String MVC_URI_COMPONENTS_CONTRIBUTOR_BEAN_NAME = "mvcUriComponentsContributor";
private static final Log logger = LogFactory.getLog(MvcUriComponentsBuilder.class);
private static final SpringObjenesis objenesis = new SpringObjenesis();
private static final PathMatcher pathMatcher = new AntPathMatcher();
private static final ParameterNameDiscoverer parameterNameDiscoverer = new DefaultParameterNameDiscoverer();
private static final CompositeUriComponentsContributor defaultUriComponentsContributor;
static {
defaultUriComponentsContributor = new CompositeUriComponentsContributor(
new PathVariableMethodArgumentResolver(), new RequestParamMethodArgumentResolver(false));
}
private final UriComponentsBuilder baseUrl;
/**
* Default constructor. Protected to prevent direct instantiation.
* @see #fromController(Class)
* @see #fromMethodName(Class, String, Object...)
* @see #fromMethodCall(Object)
* @see #fromMappingName(String)
* @see #fromMethod(Class, Method, Object...)
*/
protected MvcUriComponentsBuilder(UriComponentsBuilder baseUrl) {
Assert.notNull(baseUrl, "'baseUrl' is required");
this.baseUrl = baseUrl;
}
/**
* Create an instance of this
|
MvcUriComponentsBuilder
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/DateFormat.java
|
{
"start": 1540,
"end": 1792
}
|
class ____ implements DateTimeFormatterProvider {
@Override
public DateTimeFormatter get() {
throw new IllegalStateException("Should never be called");
}
}
}
}
|
UnsetDateTimeFormatterProvider
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SetAttr3.java
|
{
"start": 1533,
"end": 4668
}
|
enum ____ {
MODE, UID, GID, SIZE, ATIME, MTIME
};
public SetAttr3() {
mode = 0;
uid = 0;
gid = 0;
size = 0;
updateFields = EnumSet.noneOf(SetAttrField.class);
}
public SetAttr3(int mode, int uid, int gid, long size, NfsTime atime,
NfsTime mtime, EnumSet<SetAttrField> updateFields) {
this.mode = mode;
this.uid = uid;
this.gid = gid;
this.size = size;
this.updateFields = updateFields;
}
public int getMode() {
return mode;
}
public int getUid() {
return uid;
}
public int getGid() {
return gid;
}
public void setGid(int gid) {
this.gid = gid;
}
public long getSize() {
return size;
}
public NfsTime getAtime() {
return atime;
}
public NfsTime getMtime() {
return mtime;
}
public EnumSet<SetAttrField> getUpdateFields() {
return updateFields;
}
public void setUpdateFields(EnumSet<SetAttrField> updateFields) {
this.updateFields = updateFields;
}
public void serialize(XDR xdr) {
if (!updateFields.contains(SetAttrField.MODE)) {
xdr.writeBoolean(false);
} else {
xdr.writeBoolean(true);
xdr.writeInt(mode);
}
if (!updateFields.contains(SetAttrField.UID)) {
xdr.writeBoolean(false);
} else {
xdr.writeBoolean(true);
xdr.writeInt(uid);
}
if (!updateFields.contains(SetAttrField.GID)) {
xdr.writeBoolean(false);
} else {
xdr.writeBoolean(true);
xdr.writeInt(gid);
}
if (!updateFields.contains(SetAttrField.SIZE)) {
xdr.writeBoolean(false);
} else {
xdr.writeBoolean(true);
xdr.writeLongAsHyper(size);
}
if (!updateFields.contains(SetAttrField.ATIME)) {
xdr.writeBoolean(false);
} else {
xdr.writeBoolean(true);
atime.serialize(xdr);
}
if (!updateFields.contains(SetAttrField.MTIME)) {
xdr.writeBoolean(false);
} else {
xdr.writeBoolean(true);
mtime.serialize(xdr);
}
}
public void deserialize(XDR xdr) {
if (xdr.readBoolean()) {
mode = xdr.readInt();
updateFields.add(SetAttrField.MODE);
}
if (xdr.readBoolean()) {
uid = xdr.readInt();
updateFields.add(SetAttrField.UID);
}
if (xdr.readBoolean()) {
gid = xdr.readInt();
updateFields.add(SetAttrField.GID);
}
if (xdr.readBoolean()) {
size = xdr.readHyper();
updateFields.add(SetAttrField.SIZE);
}
int timeSetHow = xdr.readInt();
if (timeSetHow == TIME_SET_TO_CLIENT_TIME) {
atime = NfsTime.deserialize(xdr);
updateFields.add(SetAttrField.ATIME);
} else if (timeSetHow == TIME_SET_TO_SERVER_TIME) {
atime = new NfsTime(System.currentTimeMillis());
updateFields.add(SetAttrField.ATIME);
}
timeSetHow = xdr.readInt();
if (timeSetHow == TIME_SET_TO_CLIENT_TIME) {
mtime = NfsTime.deserialize(xdr);
updateFields.add(SetAttrField.MTIME);
} else if (timeSetHow == TIME_SET_TO_SERVER_TIME) {
mtime = new NfsTime(System.currentTimeMillis());
updateFields.add(SetAttrField.MTIME);
}
}
}
|
SetAttrField
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestFairQueuePlacementUtils.java
|
{
"start": 1938,
"end": 6073
}
|
class ____ {
/**
* Test name trimming and dot replacement in names.
*/
@Test
public void testCleanName() {
// permutations of dot placements
final String clean = "clean";
final String dotted = "not.clean";
final String multiDot = "more.un.clean";
final String seqDot = "not..clean";
final String unTrimmed = " .invalid. "; // not really a valid queue
String cleaned = cleanName(clean);
assertEquals(clean, cleaned, "Name was changed and it should not");
cleaned = cleanName(dotted);
assertFalse(cleaned.contains(DOT),
"Cleaned name contains dots and it should not");
cleaned = cleanName(multiDot);
assertFalse(cleaned.contains(DOT),
"Cleaned name contains dots and it should not");
assertNotEquals(cleaned.indexOf(DOT_REPLACEMENT),
cleaned.lastIndexOf(DOT_REPLACEMENT),
"Multi dot failed: wrong replacements found");
cleaned = cleanName(seqDot);
assertFalse(cleaned.contains(DOT),
"Cleaned name contains dots and it should not");
assertNotEquals(cleaned.indexOf(DOT_REPLACEMENT),
cleaned.lastIndexOf(DOT_REPLACEMENT),
"Sequential dot failed: wrong replacements found");
cleaned = cleanName(unTrimmed);
assertTrue(cleaned.startsWith(DOT_REPLACEMENT),
"Trimming start failed: space not removed or dot not replaced");
assertTrue(cleaned.endsWith(DOT_REPLACEMENT),
"Trimming end failed: space not removed or dot not replaced");
}
@Test
public void testAssureRoot() {
// permutations of rooted queue names
final String queueName = "base";
final String rootOnly = "root";
final String rootNoDot = "rootbase";
final String alreadyRoot = "root.base";
String rooted = assureRoot(queueName);
assertTrue(rooted.startsWith(ROOT_QUEUE + DOT),
"Queue should have root prefix (base)");
rooted = assureRoot(rootOnly);
assertEquals(rootOnly, rooted,
"'root' queue should not have root prefix (root)");
rooted = assureRoot(rootNoDot);
assertTrue(rooted.startsWith(ROOT_QUEUE + DOT),
"Queue should have root prefix (rootbase)");
assertEquals(5, rooted.lastIndexOf(ROOT_QUEUE),
"'root' queue base was replaced and not prefixed");
rooted = assureRoot(alreadyRoot);
assertEquals(rooted, alreadyRoot,
"Root prefixed queue changed and it should not (root.base)");
assertNull(assureRoot(null), "Null queue did not return null queue");
assertEquals("", assureRoot(""),
"Empty queue did not return empty name");
}
@Test
public void testIsValidQueueName() {
// permutations of valid/invalid names
final String valid = "valid";
final String validRooted = "root.valid";
final String rootOnly = "root";
final String startDot = ".invalid";
final String endDot = "invalid.";
final String startSpace = " invalid";
final String endSpace = "invalid ";
final String unicodeSpace = "\u00A0invalid";
assertFalse(isValidQueueName(null), "'null' queue was not marked as invalid");
assertTrue(isValidQueueName(""), "empty queue was not tagged valid");
assertTrue(isValidQueueName(valid),
"Simple queue name was not tagged valid (valid)");
assertTrue(isValidQueueName(rootOnly),
"Root only queue was not tagged valid (root)");
assertTrue(isValidQueueName(validRooted),
"Root prefixed queue was not tagged valid (root.valid)");
assertFalse(isValidQueueName(startDot),
"Queue starting with dot was not tagged invalid (.invalid)");
assertFalse(isValidQueueName(endDot),
"Queue ending with dot was not tagged invalid (invalid.)");
assertFalse(isValidQueueName(startSpace),
"Queue starting with space was not tagged invalid ( invalid)");
assertFalse(isValidQueueName(endSpace),
"Queue ending with space was not tagged invalid (invalid )");
// just one for sanity check extensive tests are in the scheduler utils
assertFalse(isValidQueueName(unicodeSpace),
"Queue with unicode space was not tagged as invalid (unicode)");
}
}
|
TestFairQueuePlacementUtils
|
java
|
apache__camel
|
components/camel-jgroups-raft/src/main/java/org/apache/camel/component/jgroups/raft/JGroupsRaftProducer.java
|
{
"start": 1143,
"end": 3793
}
|
class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(JGroupsRaftProducer.class);
// Producer settings
private final JGroupsRaftEndpoint endpoint;
// Constructor
public JGroupsRaftProducer(JGroupsRaftEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
// Life cycle callbacks
@Override
protected void doStart() throws Exception {
super.doStart();
endpoint.connect();
}
@Override
protected void doStop() throws Exception {
endpoint.disconnect();
super.doStop();
}
// Processing logic
@Override
public void process(Exchange exchange) throws Exception {
//TODO: implement possibility to call CompletableFuture<byte[]> setAsync(byte[] buf, int offset, int length);
byte[] body = exchange.getIn().getBody(byte[].class);
Integer setOffset = exchange.getIn().getHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_SET_OFFSET, Integer.class);
Integer setLength = exchange.getIn().getHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_SET_LENGTH, Integer.class);
Long setTimeout = exchange.getIn().getHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_SET_TIMEOUT, Long.class);
TimeUnit setTimeUnit = exchange.getIn().getHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_SET_TIMEUNIT, TimeUnit.class);
if (body != null) {
byte[] result;
if (setOffset != null && setLength != null && setTimeout != null && setTimeUnit != null) {
LOG.debug("Calling set(byte[] {}, int {}, int {}, long {}, TimeUnit {}) method on raftHandle.", body, setOffset,
setLength, setTimeout, setTimeUnit);
result = endpoint.getResolvedRaftHandle().set(body, setOffset, setLength, setTimeout, setTimeUnit);
} else if (setOffset != null && setLength != null) {
LOG.debug("Calling set(byte[] {}, int {}, int {}) method on raftHandle.", body, setOffset, setLength);
result = endpoint.getResolvedRaftHandle().set(body, setOffset, setLength);
} else {
LOG.debug("Calling set(byte[] {}, int {}, int {} (i.e. body.length)) method on raftHandle.", body, 0,
body.length);
result = endpoint.getResolvedRaftHandle().set(body, 0, body.length);
}
endpoint.populateJGroupsRaftHeaders(exchange);
exchange.getIn().setBody(result);
} else {
LOG.debug("Body is null, cannot call set method on raftHandle.");
}
}
}
|
JGroupsRaftProducer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java
|
{
"start": 3456,
"end": 5523
}
|
class ____ extends RestEsqlTestCase {
@ClassRule
public static ElasticsearchCluster cluster = Clusters.testCluster(
specBuilder -> specBuilder.plugin("mapper-size").plugin("mapper-murmur3")
);
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@ParametersFactory(argumentFormatting = "%1s")
public static List<Object[]> modes() {
return Arrays.stream(Mode.values()).map(m -> new Object[] { m }).toList();
}
public RestEsqlIT(Mode mode) {
super(mode);
}
public void testBasicEsql() throws IOException {
indexTimestampData(1);
RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats avg(value)");
if (Build.current().isSnapshot()) {
builder.pragmas(Settings.builder().put("data_partitioning", "shard").build());
}
Map<String, Object> result = runEsql(builder);
Map<String, String> colA = Map.of("name", "avg(value)", "type", "double");
assertResultMap(result, List.of(colA), List.of(List.of(499.5d)));
assertTrue(result.containsKey("took"));
}
public void testInvalidPragma() throws IOException {
assumeTrue("pragma only enabled on snapshot builds", Build.current().isSnapshot());
createIndex("test-index");
for (int i = 0; i < 10; i++) {
Request request = new Request("POST", "/test-index/_doc/");
request.addParameter("refresh", "true");
request.setJsonEntity("{\"f\":" + i + "}");
assertOK(client().performRequest(request));
}
RequestObjectBuilder builder = requestObjectBuilder().query("from test-index | limit 1 | keep f").allowPartialResults(false);
builder.pragmas(Settings.builder().put("data_partitioning", "invalid-option").build());
ResponseException re = expectThrows(ResponseException.class, () -> runEsqlSync(builder));
assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("No
|
RestEsqlIT
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesJobEndpointBuilderFactory.java
|
{
"start": 20797,
"end": 21149
}
|
class ____ extends AbstractEndpointBuilder implements KubernetesJobEndpointBuilder, AdvancedKubernetesJobEndpointBuilder {
public KubernetesJobEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new KubernetesJobEndpointBuilderImpl(path);
}
}
|
KubernetesJobEndpointBuilderImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java
|
{
"start": 1171,
"end": 4184
}
|
class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Log10LongEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator val;
private final DriverContext driverContext;
private Warnings warnings;
public Log10LongEvaluator(Source source, EvalOperator.ExpressionEvaluator val,
DriverContext driverContext) {
this.source = source;
this.val = val;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (LongBlock valBlock = (LongBlock) val.eval(page)) {
LongVector valVector = valBlock.asVector();
if (valVector == null) {
return eval(page.getPositionCount(), valBlock);
}
return eval(page.getPositionCount(), valVector);
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += val.baseRamBytesUsed();
return baseRamBytesUsed;
}
public DoubleBlock eval(int positionCount, LongBlock valBlock) {
try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (valBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
long val = valBlock.getLong(valBlock.getFirstValueIndex(p));
try {
result.appendDouble(Log10.process(val));
} catch (ArithmeticException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
public DoubleBlock eval(int positionCount, LongVector valVector) {
try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
long val = valVector.getLong(p);
try {
result.appendDouble(Log10.process(val));
} catch (ArithmeticException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
@Override
public String toString() {
return "Log10LongEvaluator[" + "val=" + val + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(val);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static
|
Log10LongEvaluator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileAggregate.java
|
{
"start": 1779,
"end": 3073
}
|
class ____ extends NumericAggregate implements EnclosedAgg, TwoOptionalArguments {
private static final PercentilesConfig.TDigest DEFAULT_PERCENTILES_CONFIG = new PercentilesConfig.TDigest();
// preferred method name to configurator mapping (type resolution, method parameter -> config)
// contains all the possible PercentilesMethods that we know of and are capable of parameterizing at the moment
private static final Map<String, MethodConfigurator> METHOD_CONFIGURATORS = new LinkedHashMap<>();
static {
Arrays.asList(new MethodConfigurator(PercentilesMethod.TDIGEST, TypeResolutions::isNumeric, methodParameter -> {
Double compression = foldNullSafe(methodParameter, DataTypes.DOUBLE);
return compression == null ? new PercentilesConfig.TDigest() : new PercentilesConfig.TDigest(compression);
}), new MethodConfigurator(PercentilesMethod.HDR, TypeResolutions::isInteger, methodParameter -> {
Integer numOfDigits = foldNullSafe(methodParameter, DataTypes.INTEGER);
return numOfDigits == null ? new PercentilesConfig.Hdr() : new PercentilesConfig.Hdr(numOfDigits);
})).forEach(c -> METHOD_CONFIGURATORS.put(c.method.getParseField().getPreferredName(), c));
}
private static
|
PercentileAggregate
|
java
|
quarkusio__quarkus
|
integration-tests/injectmock/src/test/java/io/quarkus/it/mockbean/PerClassSpyTest.java
|
{
"start": 655,
"end": 1070
}
|
class ____ {
@InjectSpy
IdentityService identityService;
@Test
@Order(1)
void testWithSpy() {
when(identityService.call(any())).thenReturn("DUMMY");
assertEquals("DUMMY", identityService.call("foo"));
}
@Test
@Order(2)
void testWithoutSpy() {
assertEquals("foo", identityService.call("foo"));
}
@ApplicationScoped
public static
|
PerClassSpyTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppUtil.java
|
{
"start": 3230,
"end": 15493
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(RMWebAppUtil.class);
/**
* Private constructor.
*/
private RMWebAppUtil() {
// not called
}
/**
* Helper method to setup filters and authentication for ResourceManager
* WebServices.
*
* Use the customized yarn filter instead of the standard kerberos filter to
* allow users to authenticate using delegation tokens 4 conditions need to be
* satisfied:
*
* 1. security is enabled.
*
* 2. http auth type is set to kerberos.
*
* 3. "yarn.resourcemanager.webapp.use-yarn-filter" override is set to true.
*
* 4. hadoop.http.filter.initializers container
* AuthenticationFilterInitializer.
*
* @param conf RM configuration.
* @param rmDTSecretManager RM specific delegation token secret manager.
**/
public static void setupSecurityAndFilters(Configuration conf,
RMDelegationTokenSecretManager rmDTSecretManager) {
boolean enableCorsFilter =
conf.getBoolean(YarnConfiguration.RM_WEBAPP_ENABLE_CORS_FILTER,
YarnConfiguration.DEFAULT_RM_WEBAPP_ENABLE_CORS_FILTER);
boolean useYarnAuthenticationFilter = conf.getBoolean(
YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
YarnConfiguration.DEFAULT_RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER);
String authPrefix = "hadoop.http.authentication.";
String authTypeKey = authPrefix + "type";
String filterInitializerConfKey = "hadoop.http.filter.initializers";
String actualInitializers = "";
Class<?>[] initializersClasses = conf.getClasses(filterInitializerConfKey);
// setup CORS
if (enableCorsFilter) {
conf.setBoolean(HttpCrossOriginFilterInitializer.PREFIX
+ HttpCrossOriginFilterInitializer.ENABLED_SUFFIX, true);
}
boolean hasHadoopAuthFilterInitializer = false;
boolean hasRMAuthFilterInitializer = false;
if (initializersClasses != null) {
for (Class<?> initializer : initializersClasses) {
if (initializer.getName()
.equals(AuthenticationFilterInitializer.class.getName())) {
hasHadoopAuthFilterInitializer = true;
}
if (initializer.getName()
.equals(RMAuthenticationFilterInitializer.class.getName())) {
hasRMAuthFilterInitializer = true;
}
}
if (UserGroupInformation.isSecurityEnabled()
&& useYarnAuthenticationFilter && hasHadoopAuthFilterInitializer
&& conf.get(authTypeKey, "")
.equals(KerberosAuthenticationHandler.TYPE)) {
ArrayList<String> target = new ArrayList<String>();
for (Class<?> filterInitializer : initializersClasses) {
if (filterInitializer.getName()
.equals(AuthenticationFilterInitializer.class.getName())) {
if (!hasRMAuthFilterInitializer) {
target.add(RMAuthenticationFilterInitializer.class.getName());
}
continue;
}
target.add(filterInitializer.getName());
}
target.remove(ProxyUserAuthenticationFilterInitializer.class.getName());
actualInitializers = StringUtils.join(",", target);
LOG.info("Using RM authentication filter(kerberos/delegation-token)"
+ " for RM webapp authentication");
RMAuthenticationFilter
.setDelegationTokenSecretManager(rmDTSecretManager);
conf.set(filterInitializerConfKey, actualInitializers);
}
}
// if security is not enabled and the default filter initializer has not
// been set, set the initializer to include the
// RMAuthenticationFilterInitializer which in turn will set up the simple
// auth filter.
String initializers = conf.get(filterInitializerConfKey);
if (!UserGroupInformation.isSecurityEnabled()) {
if (initializersClasses == null || initializersClasses.length == 0) {
conf.set(filterInitializerConfKey,
RMAuthenticationFilterInitializer.class.getName());
conf.set(authTypeKey, "simple");
} else if (initializers.equals(StaticUserWebFilter.class.getName())) {
conf.set(filterInitializerConfKey,
RMAuthenticationFilterInitializer.class.getName() + ","
+ initializers);
conf.set(authTypeKey, "simple");
}
}
}
/**
* Create the actual ApplicationSubmissionContext to be submitted to the RM
* from the information provided by the user.
*
* @param newApp the information provided by the user
* @param conf RM configuration
* @return returns the constructed ApplicationSubmissionContext
* @throws IOException in case of Error
*/
public static ApplicationSubmissionContext createAppSubmissionContext(
ApplicationSubmissionContextInfo newApp, Configuration conf)
throws IOException {
// create local resources and app submission context
ApplicationId appid;
String error =
"Could not parse application id " + newApp.getApplicationId();
try {
appid = ApplicationId.fromString(newApp.getApplicationId());
} catch (Exception e) {
throw new BadRequestException(error);
}
ApplicationSubmissionContext appContext = ApplicationSubmissionContext
.newInstance(appid, newApp.getApplicationName(), newApp.getQueue(),
Priority.newInstance(newApp.getPriority()),
createContainerLaunchContext(newApp), newApp.getUnmanagedAM(),
newApp.getCancelTokensWhenComplete(), newApp.getMaxAppAttempts(),
createAppSubmissionContextResource(newApp, conf),
newApp.getApplicationType(),
newApp.getKeepContainersAcrossApplicationAttempts(),
newApp.getAppNodeLabelExpression(),
newApp.getAMContainerNodeLabelExpression());
appContext.setApplicationTags(newApp.getApplicationTags());
appContext.setAttemptFailuresValidityInterval(
newApp.getAttemptFailuresValidityInterval());
if (newApp.getLogAggregationContextInfo() != null) {
appContext.setLogAggregationContext(
createLogAggregationContext(newApp.getLogAggregationContextInfo()));
}
String reservationIdStr = newApp.getReservationId();
if (reservationIdStr != null && !reservationIdStr.isEmpty()) {
ReservationId reservationId =
ReservationId.parseReservationId(reservationIdStr);
appContext.setReservationID(reservationId);
}
return appContext;
}
/**
* Create the actual Resource inside the ApplicationSubmissionContextInfo to
* be submitted to the RM from the information provided by the user.
*
* @param newApp the information provided by the user
* @param conf RM configuration
* @return returns the constructed Resource inside the
* ApplicationSubmissionContextInfo
* @throws BadRequestException
*/
private static Resource createAppSubmissionContextResource(
ApplicationSubmissionContextInfo newApp, Configuration conf)
throws BadRequestException {
if (newApp.getResource().getvCores() > conf.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES)) {
String msg = "Requested more cores than configured max";
throw new BadRequestException(msg);
}
if (newApp.getResource().getMemorySize() > conf.getInt(
YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB)) {
String msg = "Requested more memory than configured max";
throw new BadRequestException(msg);
}
Resource r = Resource.newInstance(newApp.getResource().getMemorySize(),
newApp.getResource().getvCores());
return r;
}
/**
* Create the ContainerLaunchContext required for the
* ApplicationSubmissionContext. This function takes the user information and
* generates the ByteBuffer structures required by the ContainerLaunchContext
*
* @param newApp the information provided by the user
* @return created context
* @throws BadRequestException
* @throws IOException
*/
private static ContainerLaunchContext createContainerLaunchContext(
ApplicationSubmissionContextInfo newApp)
throws BadRequestException, IOException {
// create container launch context
HashMap<String, ByteBuffer> hmap = new HashMap<String, ByteBuffer>();
for (Map.Entry<String, String> entry : newApp
.getContainerLaunchContextInfo().getAuxillaryServiceData().entrySet()) {
if (!entry.getValue().isEmpty()) {
Base64 decoder = new Base64(0, null, true);
byte[] data = decoder.decode(entry.getValue());
hmap.put(entry.getKey(), ByteBuffer.wrap(data));
}
}
HashMap<String, LocalResource> hlr = new HashMap<String, LocalResource>();
for (Map.Entry<String, LocalResourceInfo> entry : newApp
.getContainerLaunchContextInfo().getResources().entrySet()) {
LocalResourceInfo l = entry.getValue();
LocalResource lr = LocalResource.newInstance(URL.fromURI(l.getUrl()),
l.getType(), l.getVisibility(), l.getSize(), l.getTimestamp());
hlr.put(entry.getKey(), lr);
}
DataOutputBuffer out = new DataOutputBuffer();
Credentials cs = createCredentials(
newApp.getContainerLaunchContextInfo().getCredentials());
cs.writeTokenStorageToStream(out);
ByteBuffer tokens = ByteBuffer.wrap(out.getData());
ContainerLaunchContext ctx = ContainerLaunchContext.newInstance(hlr,
newApp.getContainerLaunchContextInfo().getEnvironment(),
newApp.getContainerLaunchContextInfo().getCommands(), hmap, tokens,
newApp.getContainerLaunchContextInfo().getAcls());
return ctx;
}
/**
* Generate a Credentials object from the information in the CredentialsInfo
* object.
*
* @param credentials the CredentialsInfo provided by the user.
* @return
*/
private static Credentials createCredentials(CredentialsInfo credentials) {
Credentials ret = new Credentials();
try {
for (Map.Entry<String, String> entry : credentials.getTokens()
.entrySet()) {
Text alias = new Text(entry.getKey());
Token<TokenIdentifier> token = new Token<TokenIdentifier>();
token.decodeFromUrlString(entry.getValue());
ret.addToken(alias, token);
}
for (Map.Entry<String, String> entry : credentials.getSecrets()
.entrySet()) {
Text alias = new Text(entry.getKey());
Base64 decoder = new Base64(0, null, true);
byte[] secret = decoder.decode(entry.getValue());
ret.addSecretKey(alias, secret);
}
} catch (IOException ie) {
throw new BadRequestException(
"Could not parse credentials data; exception message = "
+ ie.getMessage());
}
return ret;
}
private static LogAggregationContext createLogAggregationContext(
LogAggregationContextInfo logAggregationContextInfo) {
return LogAggregationContext.newInstance(
logAggregationContextInfo.getIncludePattern(),
logAggregationContextInfo.getExcludePattern(),
logAggregationContextInfo.getRolledLogsIncludePattern(),
logAggregationContextInfo.getRolledLogsExcludePattern(),
logAggregationContextInfo.getLogAggregationPolicyClassName(),
logAggregationContextInfo.getLogAggregationPolicyParameters());
}
/**
* Helper method to retrieve the UserGroupInformation from the
* HttpServletRequest.
*
* @param hsr the servlet request
* @param usePrincipal true if we need to use the principal user, remote
* otherwise.
* @return the user group information of the caller.
**/
public static UserGroupInformation getCallerUserGroupInformation(
HttpServletRequest hsr, boolean usePrincipal) {
String remoteUser = hsr.getRemoteUser();
if (usePrincipal) {
Principal princ = hsr.getUserPrincipal();
remoteUser = princ == null ? null : princ.getName();
}
UserGroupInformation callerUGI = null;
if (remoteUser != null) {
callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
}
return callerUGI;
}
}
|
RMWebAppUtil
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformNodeStatsAction.java
|
{
"start": 4330,
"end": 5414
}
|
class ____ extends BaseNodeResponse implements ToXContentObject {
private final TransformSchedulerStats schedulerStats;
public NodeStatsResponse(DiscoveryNode node, TransformSchedulerStats schedulerStats) {
super(node);
this.schedulerStats = schedulerStats;
}
public NodeStatsResponse(StreamInput in) throws IOException {
super(in);
this.schedulerStats = in.readOptionalWriteable(TransformSchedulerStats::new);
}
TransformSchedulerStats schedulerStats() {
return schedulerStats;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalWriteable(schedulerStats);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(SCHEDULER_STATS_FIELD_NAME, schedulerStats);
return builder.endObject();
}
}
}
|
NodeStatsResponse
|
java
|
apache__spark
|
common/utils-java/src/main/java/org/apache/spark/api/java/function/Function0.java
|
{
"start": 960,
"end": 1037
}
|
interface ____<R> extends Serializable {
R call() throws Exception;
}
|
Function0
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/support/ClassFilters.java
|
{
"start": 5536,
"end": 6173
}
|
class ____ implements ClassFilter, Serializable {
private final ClassFilter original;
NegateClassFilter(ClassFilter original) {
this.original = original;
}
@Override
public boolean matches(Class<?> clazz) {
return !this.original.matches(clazz);
}
@Override
public boolean equals(Object other) {
return (this == other || (other instanceof NegateClassFilter that &&
this.original.equals(that.original)));
}
@Override
public int hashCode() {
return Objects.hash(getClass(), this.original);
}
@Override
public String toString() {
return "Negate " + this.original;
}
}
}
|
NegateClassFilter
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ShareConsumeRequestManager.java
|
{
"start": 76947,
"end": 77309
}
|
enum ____ {
COMMIT_ASYNC((byte) 0),
COMMIT_SYNC((byte) 1),
CLOSE((byte) 2);
public final byte id;
AcknowledgeRequestType(byte id) {
this.id = id;
}
@Override
public String toString() {
return super.toString().toLowerCase(Locale.ROOT);
}
}
}
|
AcknowledgeRequestType
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/group/ChannelMatchers.java
|
{
"start": 4194,
"end": 4540
}
|
class ____ implements ChannelMatcher {
private final ChannelMatcher matcher;
InvertMatcher(ChannelMatcher matcher) {
this.matcher = matcher;
}
@Override
public boolean matches(Channel channel) {
return !matcher.matches(channel);
}
}
private static final
|
InvertMatcher
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/NullJdbcType.java
|
{
"start": 593,
"end": 2271
}
|
class ____ implements JdbcType {
/**
* Singleton access
*/
public static final NullJdbcType INSTANCE = new NullJdbcType();
@Override
public int getJdbcTypeCode() {
return Types.NULL;
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<X>(javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return null;
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return null;
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return null;
}
};
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBindNull(PreparedStatement st, int index, WrapperOptions options) throws SQLException {
st.setNull( index, Types.NULL );
}
@Override
protected void doBindNull(CallableStatement st, String name, WrapperOptions options) throws SQLException {
st.setNull( name, Types.NULL );
}
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) {
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) {
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
}
};
}
}
|
NullJdbcType
|
java
|
apache__camel
|
components/camel-sjms/src/test/java/org/apache/camel/component/sjms/ReconnectInOutProducerTest.java
|
{
"start": 2072,
"end": 4116
}
|
class ____ extends JmsExclusiveTestSupport {
private static final String TEST_DESTINATION_NAME = "in.out.queue.producer.test.ReconnectInOutProducerTest";
@RegisterExtension
public static ArtemisService service = ArtemisServiceFactory.createVMService();
@Override
public ArtemisService getService() {
return service;
}
@Test
public void testInOutQueueProducer() throws Exception {
MessageConsumer mc = createQueueConsumer(TEST_DESTINATION_NAME + ".request");
assertNotNull(mc);
final String requestText = "Hello World!";
final String responseText = "How are you";
mc.setMessageListener(new MyMessageListener(requestText, responseText));
Object responseObject = template.requestBody("direct:start", requestText);
assertNotNull(responseObject);
assertTrue(responseObject instanceof String);
assertEquals(responseText, responseObject);
mc.close();
reconnect();
mc = createQueueConsumer(TEST_DESTINATION_NAME + ".request");
assertNotNull(mc);
mc.setMessageListener(new MyMessageListener(requestText, responseText));
responseObject = template.requestBody("direct:start", requestText);
assertNotNull(responseObject);
assertTrue(responseObject instanceof String);
assertEquals(responseText, responseObject);
mc.close();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
.to("log:" + TEST_DESTINATION_NAME + ".in.log.1?showBody=true")
.to(ExchangePattern.InOut, "sjms:queue:" + TEST_DESTINATION_NAME + ".request" + "?replyTo="
+ TEST_DESTINATION_NAME + ".response")
.to("log:" + TEST_DESTINATION_NAME + ".out.log.1?showBody=true");
}
};
}
protected
|
ReconnectInOutProducerTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainResponse.java
|
{
"start": 923,
"end": 1771
}
|
class ____ extends ActionResponse implements ChunkedToXContentObject {
private final ClusterAllocationExplanation cae;
public ClusterAllocationExplainResponse(StreamInput in) throws IOException {
this.cae = new ClusterAllocationExplanation(in);
}
public ClusterAllocationExplainResponse(ClusterAllocationExplanation cae) {
this.cae = cae;
}
/**
* Return the explanation for shard allocation in the cluster
*/
public ClusterAllocationExplanation getExplanation() {
return this.cae;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
cae.writeTo(out);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return cae.toXContentChunked(params);
}
}
|
ClusterAllocationExplainResponse
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/params/aggregator/AggregatorIntegrationTests.java
|
{
"start": 14250,
"end": 14661
}
|
class ____ {
static final List<String> output = new ArrayList<>();
@SuppressWarnings("JUnitMalformedDeclaration")
@ParameterizedTest
@ValueSource(ints = { 1, 2, 3 })
void testWithCountingConverterAggregator(@ConvertWith(InstanceCountingConverter.class) int i,
@AggregateWith(InstanceCountingAggregator.class) Object o) {
output.add("noisy test(" + i + ", " + o + ")");
}
}
}
|
CountingTestCase
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncOnExceptionFailureProcessorWithRedeliveryTest.java
|
{
"start": 1196,
"end": 3352
}
|
class ____ extends ContextTestSupport {
private static String beforeThreadName;
private static String afterThreadName;
@Test
public void testAsyncEndpoint() throws Exception {
getMockEndpoint("mock:error").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(0);
try {
template.requestBody("direct:start", "Hello Camel", String.class);
fail("Should throw exception");
} catch (CamelExecutionException e) {
CamelExchangeException cause = assertIsInstanceOf(CamelExchangeException.class, e.getCause());
assertTrue(cause.getMessage().startsWith("Simulated error at attempt 1."));
}
assertMockEndpointsSatisfied();
assertFalse(beforeThreadName.equalsIgnoreCase(afterThreadName), "Should use different threads");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.addComponent("async", new MyAsyncComponent());
// use redelivery up till 5 times
errorHandler(defaultErrorHandler().maximumRedeliveries(5));
onException(IllegalArgumentException.class).handled(true).process(new Processor() {
public void process(Exchange exchange) {
beforeThreadName = Thread.currentThread().getName();
}
})
// invoking the async endpoint could also cause a failure so
// test that we can do redelivery
.to("async:bye:camel?failFirstAttempts=2").process(new Processor() {
public void process(Exchange exchange) {
afterThreadName = Thread.currentThread().getName();
}
}).to("mock:error");
from("direct:start").throwException(new IllegalArgumentException("Damn")).to("mock:result");
}
};
}
}
|
AsyncOnExceptionFailureProcessorWithRedeliveryTest
|
java
|
google__dagger
|
javatests/dagger/functional/factory/ConcreteModuleThatCouldBeAbstract.java
|
{
"start": 701,
"end": 812
}
|
class ____ {
@Provides
static double provideDouble() {
return 42.0;
}
}
|
ConcreteModuleThatCouldBeAbstract
|
java
|
apache__camel
|
core/camel-base/src/main/java/org/apache/camel/impl/converter/FutureTypeConverter.java
|
{
"start": 1248,
"end": 3238
}
|
class ____ extends TypeConverterSupport {
private static final Logger LOG = LoggerFactory.getLogger(FutureTypeConverter.class);
private final TypeConverter converter;
public FutureTypeConverter(TypeConverter converter) {
this.converter = converter;
}
@Override
public <T> T convertTo(Class<T> type, Exchange exchange, Object value) {
try {
return doConvertTo(type, exchange, value);
} catch (Exception e) {
throw new TypeConversionException(value, type, e);
}
}
@SuppressWarnings("unchecked")
private <T> T doConvertTo(Class<T> type, Exchange exchange, Object value) throws Exception {
// do not convert to stream cache
if (StreamCache.class.isAssignableFrom(value.getClass())) {
return null;
}
if (Future.class.isAssignableFrom(value.getClass())) {
Future<?> future = (Future<?>) value;
if (future.isCancelled()) {
// return void to indicate its not possible to convert at this time
return (T) MISS_VALUE;
}
// do some trace logging as the get is blocking until the response is ready
LOG.trace("Getting future response");
Object body = future.get();
LOG.trace("Got future response");
if (body == null) {
// return void to indicate its not possible to convert at this time
return (T) MISS_VALUE;
}
// maybe from is already the type we want
if (type.isAssignableFrom(body.getClass())) {
return type.cast(body);
} else if (body instanceof Exchange result) {
body = ExchangeHelper.extractResultBody(result, result.getPattern());
}
// no then convert to the type
return converter.convertTo(type, exchange, body);
}
return null;
}
}
|
FutureTypeConverter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java
|
{
"start": 1764,
"end": 9579
}
|
class ____ extends AbstractQueryBuilder<BoostingQueryBuilder> {
public static final String NAME = "boosting";
private static final ParseField POSITIVE_FIELD = new ParseField("positive");
private static final ParseField NEGATIVE_FIELD = new ParseField("negative");
private static final ParseField NEGATIVE_BOOST_FIELD = new ParseField("negative_boost");
private final QueryBuilder positiveQuery;
private final QueryBuilder negativeQuery;
private float negativeBoost = -1;
/**
* Create a new {@link BoostingQueryBuilder}
*
* @param positiveQuery the positive query for this boosting query.
* @param negativeQuery the negative query for this boosting query.
*/
public BoostingQueryBuilder(QueryBuilder positiveQuery, QueryBuilder negativeQuery) {
if (positiveQuery == null) {
throw new IllegalArgumentException("inner clause [positive] cannot be null.");
}
if (negativeQuery == null) {
throw new IllegalArgumentException("inner clause [negative] cannot be null.");
}
this.positiveQuery = positiveQuery;
this.negativeQuery = negativeQuery;
}
/**
* Read from a stream.
*/
public BoostingQueryBuilder(StreamInput in) throws IOException {
super(in);
positiveQuery = in.readNamedWriteable(QueryBuilder.class);
negativeQuery = in.readNamedWriteable(QueryBuilder.class);
negativeBoost = in.readFloat();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(positiveQuery);
out.writeNamedWriteable(negativeQuery);
out.writeFloat(negativeBoost);
}
/**
* Get the positive query for this boosting query.
*/
public QueryBuilder positiveQuery() {
return this.positiveQuery;
}
/**
* Get the negative query for this boosting query.
*/
public QueryBuilder negativeQuery() {
return this.negativeQuery;
}
/**
* Set the negative boost factor.
*/
public BoostingQueryBuilder negativeBoost(float negativeBoost) {
if (negativeBoost < 0) {
throw new IllegalArgumentException("query requires negativeBoost to be set to positive value");
}
this.negativeBoost = negativeBoost;
return this;
}
/**
* Get the negative boost factor.
*/
public float negativeBoost() {
return this.negativeBoost;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(POSITIVE_FIELD.getPreferredName());
positiveQuery.toXContent(builder, params);
builder.field(NEGATIVE_FIELD.getPreferredName());
negativeQuery.toXContent(builder, params);
builder.field(NEGATIVE_BOOST_FIELD.getPreferredName(), negativeBoost);
printBoostAndQueryName(builder);
builder.endObject();
}
public static BoostingQueryBuilder fromXContent(XContentParser parser) throws IOException {
QueryBuilder positiveQuery = null;
boolean positiveQueryFound = false;
QueryBuilder negativeQuery = null;
boolean negativeQueryFound = false;
float boost = DEFAULT_BOOST;
float negativeBoost = -1;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (POSITIVE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
positiveQuery = parseInnerQueryBuilder(parser);
positiveQueryFound = true;
} else if (NEGATIVE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
negativeQuery = parseInnerQueryBuilder(parser);
negativeQueryFound = true;
} else {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (NEGATIVE_BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
negativeBoost = parser.floatValue();
} else if (NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
queryName = parser.text();
} else if (BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]");
}
}
}
if (positiveQueryFound == false) {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'positive' query to be set'");
}
if (negativeQueryFound == false) {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'negative' query to be set'");
}
if (negativeBoost < 0) {
throw new ParsingException(
parser.getTokenLocation(),
"[boosting] query requires 'negative_boost' to be set to be a positive value'"
);
}
BoostingQueryBuilder boostingQuery = new BoostingQueryBuilder(positiveQuery, negativeQuery);
boostingQuery.negativeBoost(negativeBoost);
boostingQuery.boost(boost);
boostingQuery.queryName(queryName);
return boostingQuery;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
Query positive = positiveQuery.toQuery(context);
Query negative = negativeQuery.toQuery(context);
return FunctionScoreQuery.boostByQuery(positive, negative, negativeBoost);
}
@Override
protected int doHashCode() {
return Objects.hash(negativeBoost, positiveQuery, negativeQuery);
}
@Override
protected boolean doEquals(BoostingQueryBuilder other) {
return Objects.equals(negativeBoost, other.negativeBoost)
&& Objects.equals(positiveQuery, other.positiveQuery)
&& Objects.equals(negativeQuery, other.negativeQuery);
}
@Override
protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder positiveQuery = this.positiveQuery.rewrite(queryRewriteContext);
if (positiveQuery instanceof MatchNoneQueryBuilder) {
return positiveQuery;
}
QueryBuilder negativeQuery = this.negativeQuery.rewrite(queryRewriteContext);
if (positiveQuery != this.positiveQuery || negativeQuery != this.negativeQuery) {
BoostingQueryBuilder newQueryBuilder = new BoostingQueryBuilder(positiveQuery, negativeQuery);
newQueryBuilder.negativeBoost = negativeBoost;
return newQueryBuilder;
}
return this;
}
@Override
protected void extractInnerHitBuilders(Map<String, InnerHitContextBuilder> innerHits) {
InnerHitContextBuilder.extractInnerHits(positiveQuery, innerHits);
InnerHitContextBuilder.extractInnerHits(negativeQuery, innerHits);
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
|
BoostingQueryBuilder
|
java
|
apache__kafka
|
raft/src/main/java/org/apache/kafka/raft/internals/AddVoterHandlerState.java
|
{
"start": 1125,
"end": 2979
}
|
class ____ {
private final ReplicaKey voterKey;
private final Endpoints voterEndpoints;
private final boolean ackWhenCommitted;
private final Timer timeout;
private final CompletableFuture<AddRaftVoterResponseData> future = new CompletableFuture<>();
private OptionalLong lastOffset = OptionalLong.empty();
AddVoterHandlerState(
ReplicaKey voterKey,
Endpoints voterEndpoints,
boolean ackWhenCommitted,
Timer timeout
) {
this.voterKey = voterKey;
this.voterEndpoints = voterEndpoints;
this.ackWhenCommitted = ackWhenCommitted;
this.timeout = timeout;
}
public long timeUntilOperationExpiration(long currentTimeMs) {
timeout.update(currentTimeMs);
return timeout.remainingMs();
}
public boolean expectingApiResponse(int replicaId) {
return lastOffset.isEmpty() && replicaId == voterKey.id();
}
public void setLastOffset(long lastOffset) {
if (this.lastOffset.isPresent()) {
throw new IllegalStateException(
String.format(
"Cannot override last offset to %s for adding voter %s because it is " +
"already set to %s",
lastOffset,
voterKey,
this.lastOffset
)
);
}
this.lastOffset = OptionalLong.of(lastOffset);
}
public ReplicaKey voterKey() {
return voterKey;
}
public Endpoints voterEndpoints() {
return voterEndpoints;
}
public boolean ackWhenCommitted() {
return ackWhenCommitted;
}
public OptionalLong lastOffset() {
return lastOffset;
}
public CompletableFuture<AddRaftVoterResponseData> future() {
return future;
}
}
|
AddVoterHandlerState
|
java
|
apache__avro
|
lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectDoubleArrayTest.java
|
{
"start": 1650,
"end": 2410
}
|
class ____ {
private static final int ARRAY_SIZE = 10;
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void encode(final TestStateEncode state) throws Exception {
for (final double[] r : state.testData) {
state.datumWriter.write(r, state.encoder);
}
}
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
final Decoder d = state.decoder;
final ReflectDatumReader<double[]> datumReader = new ReflectDatumReader<>(state.schema);
for (int i = 0; i < state.getBatchSize(); i++) {
blackhole.consume(datumReader.read(null, d));
}
}
@State(Scope.Thread)
public static
|
ReflectDoubleArrayTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/KeyedPage.java
|
{
"start": 5785,
"end": 5885
}
|
enum ____ {
KEY_OF_LAST_ON_PREVIOUS_PAGE,
KEY_OF_FIRST_ON_NEXT_PAGE,
NO_KEY
}
}
|
KeyInterpretation
|
java
|
google__guava
|
android/guava-testlib/test/com/google/common/testing/GcFinalizationTest.java
|
{
"start": 3894,
"end": 8064
}
|
class ____ extends Thread {
final AtomicBoolean shutdown;
Interruptenator(Thread interruptee) {
this(interruptee, new AtomicBoolean(false));
}
@SuppressWarnings("ThreadPriorityCheck") // TODO: b/175898629 - Consider onSpinWait.
Interruptenator(Thread interruptee, AtomicBoolean shutdown) {
super(
new Runnable() {
@Override
public void run() {
while (!shutdown.get()) {
interruptee.interrupt();
Thread.yield();
}
}
});
this.shutdown = shutdown;
start();
}
@SuppressWarnings("ThreadPriorityCheck") // TODO: b/175898629 - Consider onSpinWait.
void shutdown() {
shutdown.set(true);
while (this.isAlive()) {
Thread.yield();
}
}
}
void assertWrapsInterruptedException(RuntimeException e) {
assertThat(e).hasMessageThat().contains("Unexpected interrupt");
assertThat(e).hasCauseThat().isInstanceOf(InterruptedException.class);
}
public void testAwait_countDownLatch_interrupted() {
Interruptenator interruptenator = new Interruptenator(Thread.currentThread());
try {
CountDownLatch latch = new CountDownLatch(1);
RuntimeException expected =
assertThrows(RuntimeException.class, () -> GcFinalization.await(latch));
assertWrapsInterruptedException(expected);
} finally {
interruptenator.shutdown();
Thread.interrupted();
}
}
public void testAwaitDone_future_interrupted_interrupted() {
Interruptenator interruptenator = new Interruptenator(Thread.currentThread());
try {
SettableFuture<@Nullable Void> future = SettableFuture.create();
RuntimeException expected =
assertThrows(RuntimeException.class, () -> GcFinalization.awaitDone(future));
assertWrapsInterruptedException(expected);
} finally {
interruptenator.shutdown();
Thread.interrupted();
}
}
public void testAwaitClear_interrupted() {
Interruptenator interruptenator = new Interruptenator(Thread.currentThread());
try {
WeakReference<Object> ref = new WeakReference<Object>(Boolean.TRUE);
RuntimeException expected =
assertThrows(RuntimeException.class, () -> GcFinalization.awaitClear(ref));
assertWrapsInterruptedException(expected);
} finally {
interruptenator.shutdown();
Thread.interrupted();
}
}
public void testAwaitDone_finalizationPredicate_interrupted() {
Interruptenator interruptenator = new Interruptenator(Thread.currentThread());
try {
RuntimeException expected =
assertThrows(
RuntimeException.class,
() ->
GcFinalization.awaitDone(
new FinalizationPredicate() {
@Override
public boolean isDone() {
return false;
}
}));
assertWrapsInterruptedException(expected);
} finally {
interruptenator.shutdown();
Thread.interrupted();
}
}
/**
* awaitFullGc() is not quite as reliable a way to ensure calling of a specific finalize method as
* the more direct await* methods, but should be reliable enough in practice to avoid flakiness of
* this test. (And if it isn't, we'd like to know about it first!)
*/
public void testAwaitFullGc() {
CountDownLatch finalizerRan = new CountDownLatch(1);
WeakReference<Object> ref =
new WeakReference<Object>(
new Object() {
@SuppressWarnings({"removal", "Finalize"}) // b/260137033
@Override
protected void finalize() {
finalizerRan.countDown();
}
});
// Don't copy this into your own test!
// Use e.g. awaitClear or await(CountDownLatch) instead.
GcFinalization.awaitFullGc();
// Attempt to help with some flakiness that we've seen: b/387521512.
GcFinalization.awaitFullGc();
assertEquals(0, finalizerRan.getCount());
assertThat(ref.get()).isNull();
}
}
|
Interruptenator
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/map/MapAssert_containsExactly_with_Java_Util_MapEntry_Test.java
|
{
"start": 1003,
"end": 2133
}
|
class ____ extends MapAssertBaseTest {
final Map.Entry<String, String>[] entries = array(javaMapEntry("key1", "value1"), javaMapEntry("key2", "value2"));
@Override
protected MapAssert<Object, Object> invoke_api_method() {
return assertions.containsExactly(javaMapEntry("key1", "value1"), javaMapEntry("key2", "value2"));
}
@Override
protected void verify_internal_effects() {
verify(maps).assertContainsExactly(getInfo(assertions), getActual(assertions), entries, null);
}
@Test
void invoke_api_like_user() {
assertThat(map("key1", "value1", "key2", "value2")).containsExactly(javaMapEntry("key1", "value1"),
javaMapEntry("key2", "value2"));
}
@Test
void should_honor_custom_value_equals_when_comparing_entry_values() {
// GIVEN
var map = map("key1", "value1", "key2", "value2");
// WHEN/THEN
then(map).usingEqualsForValues(String::equalsIgnoreCase)
.containsExactly(javaMapEntry("key1", "VALUE1"), javaMapEntry("key2", "VALUE2"));
}
}
|
MapAssert_containsExactly_with_Java_Util_MapEntry_Test
|
java
|
google__guava
|
android/guava/src/com/google/common/util/concurrent/AbstractFutureState.java
|
{
"start": 25455,
"end": 29155
}
|
class ____ extends AtomicHelper {
static final Unsafe UNSAFE;
static final long LISTENERS_OFFSET;
static final long WAITERS_OFFSET;
static final long VALUE_OFFSET;
static final long WAITER_THREAD_OFFSET;
static final long WAITER_NEXT_OFFSET;
static {
Unsafe unsafe = null;
try {
unsafe = Unsafe.getUnsafe();
} catch (SecurityException tryReflectionInstead) {
try {
unsafe =
doPrivileged(
(PrivilegedExceptionAction<Unsafe>)
() -> {
Class<Unsafe> k = Unsafe.class;
for (Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x)) {
return k.cast(x);
}
}
throw new NoSuchFieldError("the Unsafe");
});
} catch (PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics", e.getCause());
}
}
try {
Class<?> abstractFutureState = AbstractFutureState.class;
WAITERS_OFFSET =
unsafe.objectFieldOffset(abstractFutureState.getDeclaredField("waitersField"));
LISTENERS_OFFSET =
unsafe.objectFieldOffset(abstractFutureState.getDeclaredField("listenersField"));
VALUE_OFFSET = unsafe.objectFieldOffset(abstractFutureState.getDeclaredField("valueField"));
WAITER_THREAD_OFFSET = unsafe.objectFieldOffset(Waiter.class.getDeclaredField("thread"));
WAITER_NEXT_OFFSET = unsafe.objectFieldOffset(Waiter.class.getDeclaredField("next"));
UNSAFE = unsafe;
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
@Override
void putThread(Waiter waiter, Thread newValue) {
UNSAFE.putObject(waiter, WAITER_THREAD_OFFSET, newValue);
}
@Override
void putNext(Waiter waiter, @Nullable Waiter newValue) {
UNSAFE.putObject(waiter, WAITER_NEXT_OFFSET, newValue);
}
@Override
boolean casWaiters(
AbstractFutureState<?> future, @Nullable Waiter expect, @Nullable Waiter update) {
return UNSAFE.compareAndSwapObject(future, WAITERS_OFFSET, expect, update);
}
@Override
boolean casListeners(
AbstractFutureState<?> future, @Nullable Listener expect, Listener update) {
return UNSAFE.compareAndSwapObject(future, LISTENERS_OFFSET, expect, update);
}
@Override
@Nullable Listener gasListeners(AbstractFutureState<?> future, Listener update) {
while (true) {
Listener listener = future.listenersField;
if (update == listener) {
return listener;
}
if (casListeners(future, listener, update)) {
return listener;
}
}
}
@Override
@Nullable Waiter gasWaiters(AbstractFutureState<?> future, Waiter update) {
while (true) {
Waiter waiter = future.waitersField;
if (update == waiter) {
return waiter;
}
if (casWaiters(future, waiter, update)) {
return waiter;
}
}
}
@Override
boolean casValue(AbstractFutureState<?> future, @Nullable Object expect, Object update) {
return UNSAFE.compareAndSwapObject(future, VALUE_OFFSET, expect, update);
}
@Override
String atomicHelperTypeForTest() {
return "UnsafeAtomicHelper";
}
}
/** {@link AtomicHelper} based on {@link AtomicReferenceFieldUpdater}. */
private static final
|
UnsafeAtomicHelper
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/test/java/io/quarkus/gradle/ImageTasksWithConfigurationCacheTest.java
|
{
"start": 307,
"end": 2964
}
|
class ____ extends QuarkusGradleWrapperTestBase {
@Test
@DisabledOnOs(value = OS.WINDOWS, disabledReason = "cannot access the file because another process has locked a portion of the file")
public void shouldReuseConfigurationCacheImageBuildIfTheExtensionIsAdded() throws Exception {
File projectDir = getProjectDir("it-test-basic-project");
runGradleWrapper(projectDir, "addExtension", "--extensions=quarkus-container-image-docker");
BuildResult buildResult = runGradleWrapper(projectDir, "imageBuild");
assertThat(BuildResult.isSuccessful(buildResult.getTasks().get(":imageBuild"))).isTrue();
assertTrue(buildResult.getOutput().contains("Configuration cache entry stored"));
BuildResult buildResult3 = runGradleWrapper(projectDir, "imageBuild");
assertTrue(buildResult3.getOutput().contains("Reusing configuration cache."));
}
@Test
@DisabledOnOs(value = OS.WINDOWS, disabledReason = "cannot access the file because another process has locked a portion of the file")
public void shouldReuseConfigurationCacheWithProjectIsolationImageBuildIfTheExtensionIsAdded() throws Exception {
File projectDir = getProjectDir("it-test-basic-project");
runGradleWrapper(projectDir, "addExtension", "--extensions=quarkus-container-image-docker");
BuildResult buildResult = runGradleWrapper(projectDir, "imageBuild", "-Dorg.gradle.unsafe.isolated-projects=true");
assertThat(BuildResult.isSuccessful(buildResult.getTasks().get(":imageBuild"))).isTrue();
assertTrue(buildResult.getOutput().contains("Configuration cache entry stored"));
BuildResult buildResult3 = runGradleWrapper(projectDir, "imageBuild", "-Dorg.gradle.unsafe.isolated-projects=true");
assertTrue(buildResult3.getOutput().contains("Reusing configuration cache."));
}
@Test
public void shouldFailIfExtensionIsNotDefinedInTheBuild() throws Exception {
File projectDir = getProjectDir("it-test-basic-project");
BuildResult buildResultImageBuild = runGradleWrapper(true, projectDir, "clean", "imageBuild", "--no-build-cache");
assertTrue(buildResultImageBuild.getOutput()
.contains("Task: quarkusImageExtensionChecks requires extensions: quarkus-container-image-docker"));
BuildResult buildResultImagePush = runGradleWrapper(true, projectDir, "clean", "imagePush", "--no-build-cache");
assertTrue(buildResultImagePush.getOutput()
.contains("Task: quarkusImageExtensionChecks requires extensions: quarkus-container-image-docker"));
}
}
|
ImageTasksWithConfigurationCacheTest
|
java
|
google__dagger
|
dagger-spi/main/java/dagger/spi/model/DaggerTypeElement.java
|
{
"start": 905,
"end": 1422
}
|
class ____ {
/**
* Returns the Javac representation for the type element.
*
* @throws IllegalStateException if the current backend isn't Javac.
*/
public abstract TypeElement javac();
/**
* Returns the KSP representation for the type element.
*
* @throws IllegalStateException if the current backend isn't KSP.
*/
public abstract KSClassDeclaration ksp();
/** Returns the backend used in this compilation. */
public abstract DaggerProcessingEnv.Backend backend();
}
|
DaggerTypeElement
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/TaskAttempt.java
|
{
"start": 1325,
"end": 2966
}
|
interface ____ {
TaskAttemptId getID();
TaskAttemptReport getReport();
List<String> getDiagnostics();
Counters getCounters();
float getProgress();
Phase getPhase();
TaskAttemptState getState();
/**
* Has attempt reached the final state or not.
* @return true if it has finished, else false
*/
boolean isFinished();
/**
* @return the container ID if a container is assigned, otherwise null.
*/
ContainerId getAssignedContainerID();
/**
* @return container mgr address if a container is assigned, otherwise null.
*/
String getAssignedContainerMgrAddress();
/**
* @return node's id if a container is assigned, otherwise null.
*/
NodeId getNodeId();
/**
* @return node's http address if a container is assigned, otherwise null.
*/
String getNodeHttpAddress();
/**
* @return node's rack name if a container is assigned, otherwise null.
*/
String getNodeRackName();
/**
* @return time at which container is launched. If container is not launched
* yet, returns 0.
*/
long getLaunchTime();
/**
* @return attempt's finish time. If attempt is not finished
* yet, returns 0.
*/
long getFinishTime();
/**
* @return The attempt's shuffle finish time if the attempt is a reduce. If
* attempt is not finished yet, returns 0.
*/
long getShuffleFinishTime();
/**
* @return The attempt's sort or merge finish time if the attempt is a reduce.
* If attempt is not finished yet, returns 0.
*/
long getSortFinishTime();
/**
* @return the port shuffle is on.
*/
public int getShufflePort();
}
|
TaskAttempt
|
java
|
mockito__mockito
|
mockito-extensions/mockito-errorprone/src/main/java/org/mockito/errorprone/bugpatterns/MockitoNotExtensible.java
|
{
"start": 1384,
"end": 2000
}
|
class ____ extends BugChecker implements ClassTreeMatcher {
@Override
public Description matchClass(ClassTree tree, VisitorState state) {
if (tree.getImplementsClause().stream()
.anyMatch(
implementing ->
ASTHelpers.hasAnnotation(
ASTHelpers.getSymbol(implementing),
"org.mockito.NotExtensible",
state))) {
return describeMatch(tree);
}
return NO_MATCH;
}
}
|
MockitoNotExtensible
|
java
|
google__guice
|
core/test/com/google/inject/MethodInterceptionTest.java
|
{
"start": 2895,
"end": 13101
}
|
class ____ implements MethodInterceptor {
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
return methodInvocation.proceed();
}
}
@Before
public void checkBytecodeGenIsEnabled() {
assumeTrue(InternalFlags.isBytecodeGenEnabled());
}
@Test
public void testSharedProxyClasses() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(
Matchers.any(), Matchers.returns(only(Foo.class)), new ReturnNullInterceptor());
}
});
Injector childOne =
injector.createChildInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Interceptable.class);
}
});
Interceptable nullFoosOne = childOne.getInstance(Interceptable.class);
assertNotNull(nullFoosOne.bar());
assertNull(nullFoosOne.foo()); // confirm it's being intercepted
Injector childTwo =
injector.createChildInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Interceptable.class);
}
});
Interceptable nullFoosTwo = childTwo.getInstance(Interceptable.class);
assertNull(nullFoosTwo.foo()); // confirm it's being intercepted
assertSame(
"Child injectors should share proxy classes, otherwise memory leaks!",
nullFoosOne.getClass(),
nullFoosTwo.getClass());
Injector injector2 =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(
Matchers.any(), Matchers.returns(only(Foo.class)), new ReturnNullInterceptor());
}
});
Interceptable separateNullFoos = injector2.getInstance(Interceptable.class);
assertNull(separateNullFoos.foo()); // confirm it's being intercepted
assertSame(
"different injectors should share proxy classes, otherwise memory leaks!",
nullFoosOne.getClass(),
separateNullFoos.getClass());
}
@Test
public void testGetThis() {
final AtomicReference<Object> lastTarget = new AtomicReference<>();
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(
Matchers.any(),
Matchers.any(),
new MethodInterceptor() {
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
lastTarget.set(methodInvocation.getThis());
return methodInvocation.proceed();
}
});
}
});
Interceptable interceptable = injector.getInstance(Interceptable.class);
interceptable.foo();
assertSame(interceptable, lastTarget.get());
}
@Test
public void testInterceptingFinalClass() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(
Matchers.any(),
Matchers.any(),
new MethodInterceptor() {
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
return methodInvocation.proceed();
}
});
}
});
try {
injector.getInstance(NotInterceptable.class);
fail();
} catch (ConfigurationException ce) {
assertEquals(
"Unable to method intercept: " + NotInterceptable.class.getName(),
Iterables.getOnlyElement(ce.getErrorMessages()).getMessage().toString());
assertEquals(
"Cannot subclass final class " + NotInterceptable.class.getName(),
ce.getCause().getMessage());
}
}
@Test
public void testSpiAccessToInterceptors() throws NoSuchMethodException {
final MethodInterceptor countingInterceptor = new CountingInterceptor();
final MethodInterceptor returnNullInterceptor = new ReturnNullInterceptor();
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(
Matchers.any(), Matchers.returns(only(Foo.class)), countingInterceptor);
bindInterceptor(
Matchers.any(),
Matchers.returns(only(Foo.class).or(only(Bar.class))),
returnNullInterceptor);
}
});
ConstructorBinding<?> interceptedBinding =
(ConstructorBinding<?>) injector.getBinding(Interceptable.class);
Method barMethod = Interceptable.class.getMethod("bar");
Method fooMethod = Interceptable.class.getMethod("foo");
assertEquals(
ImmutableMap.<Method, List<MethodInterceptor>>of(
fooMethod, ImmutableList.of(countingInterceptor, returnNullInterceptor),
barMethod, ImmutableList.of(returnNullInterceptor)),
interceptedBinding.getMethodInterceptors());
ConstructorBinding<?> nonInterceptedBinding =
(ConstructorBinding<?>) injector.getBinding(Foo.class);
assertEquals(
ImmutableMap.<Method, List<MethodInterceptor>>of(),
nonInterceptedBinding.getMethodInterceptors());
injector.getInstance(Interceptable.class).foo();
assertEquals("expected counting interceptor to be invoked first", 1, count.get());
}
@Test
public void testGetElements_interceptorBindings() throws Exception {
@SuppressWarnings("rawtypes")
Matcher<Class> classMatcher = Matchers.subclassesOf(List.class);
Matcher<Method> methodMatcher = Matchers.returns(Matchers.identicalTo(int.class));
MethodInterceptor interceptor =
new MethodInterceptor() {
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
return null;
}
};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(classMatcher, methodMatcher, interceptor);
}
});
final List<InterceptorBinding> interceptorBindings = new ArrayList<>();
for (Element element : injector.getElements()) {
element.acceptVisitor(
new DefaultElementVisitor<Void>() {
@Override
public Void visit(InterceptorBinding interceptorBinding) {
interceptorBindings.add(interceptorBinding);
return null;
}
});
}
assertThat(interceptorBindings).hasSize(1);
InterceptorBinding extractedBinding = interceptorBindings.get(0);
assertSame(classMatcher, extractedBinding.getClassMatcher());
assertSame(methodMatcher, extractedBinding.getMethodMatcher());
assertSame(interceptor, extractedBinding.getInterceptors().get(0));
}
@Test
public void testInterceptedMethodThrows() throws Exception {
  // Two stacked interceptors around every method: an exception thrown by the
  // intercepted method must propagate through both interceptor frames.
  Injector injector =
      Guice.createInjector(
          new AbstractModule() {
            @Override
            protected void configure() {
              bindInterceptor(Matchers.any(), Matchers.any(), new CountingInterceptor());
              bindInterceptor(Matchers.any(), Matchers.any(), new CountingInterceptor());
            }
          });
  Interceptable interceptable = injector.getInstance(Interceptable.class);
  try {
    interceptable.explode();
    fail();
  } catch (Exception e) {
    // validate all causes.
    // Every throwable in the cause chain should show the same frame layout:
    // explode() at the top, then one "invoke" frame per interceptor (two here),
    // then this test method. JDK lambda-machinery frames may be interleaved
    // and are skipped via isLambdaFrame().
    for (Throwable t = e; t != null; t = t.getCause()) {
      StackTraceElement[] stackTraceElement = t.getStackTrace();
      int frame = 0;
      assertEquals("explode", stackTraceElement[frame++].getMethodName());
      while (isLambdaFrame(stackTraceElement[frame])) {
        frame++; // ignore lambda frames when running tests with ShowHiddenFrames
      }
      assertEquals("invoke", stackTraceElement[frame++].getMethodName());
      assertEquals("invoke", stackTraceElement[frame++].getMethodName());
      assertEquals("testInterceptedMethodThrows", stackTraceElement[frame++].getMethodName());
    }
  }
}
/**
 * Returns true if the given stack frame was inserted by the JDK's lambda/method-handle
 * machinery (relevant when tests run with -XX:+ShowHiddenFrames).
 */
private static boolean isLambdaFrame(StackTraceElement element) {
  String owner = element.getClassName();
  for (String prefix :
      new String[] {"java.lang.invoke.LambdaForm", "java.lang.invoke.DirectMethodHandle"}) {
    if (owner.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
@Test
public void testNotInterceptedMethodsInInterceptedClassDontAddFrames() {
  // Intercept only methods returning Foo: foo() goes through the proxy,
  // bar() must execute without any extra proxy stack frame.
  Injector injector =
      Guice.createInjector(
          new AbstractModule() {
            @Override
            protected void configure() {
              bindInterceptor(
                  Matchers.any(), Matchers.returns(only(Foo.class)), new NoOpInterceptor());
            }
          });
  Interceptable interceptable = injector.getInstance(Interceptable.class);
  assertNull(interceptable.lastElements);
  // foo() matches the method matcher, so a Guice-generated proxy frame must be present.
  interceptable.foo();
  assertTrue(
      Arrays.toString(interceptable.lastElements),
      containsGuiceProxyFrame(interceptable.lastElements));
  // bar() is not intercepted; the proxy must not contribute a frame.
  interceptable.bar();
  assertFalse(
      Arrays.toString(interceptable.lastElements),
      containsGuiceProxyFrame(interceptable.lastElements));
}

/** Returns true if any frame in {@code elements} comes from a Guice-generated proxy class. */
private static boolean containsGuiceProxyFrame(StackTraceElement[] elements) {
  for (StackTraceElement element : elements) {
    if (element.toString().contains("$EnhancerByGuice$")) {
      return true;
    }
  }
  return false;
}
public static
|
NoOpInterceptor
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/authentication/rememberme/AbstractRememberMeServicesTests.java
|
{
"start": 18571,
"end": 19571
}
|
class ____ extends AbstractRememberMeServices {
boolean loginSuccessCalled;
MockRememberMeServices(String key, UserDetailsService userDetailsService) {
super(key, userDetailsService);
}
MockRememberMeServices(UserDetailsService userDetailsService) {
super("xxxx", userDetailsService);
}
MockRememberMeServices() {
this(new MockUserDetailsService(null, false));
}
@Override
protected void onLoginSuccess(HttpServletRequest request, HttpServletResponse response,
Authentication successfulAuthentication) {
this.loginSuccessCalled = true;
}
@Override
protected UserDetails processAutoLoginCookie(String[] cookieTokens, HttpServletRequest request,
HttpServletResponse response) throws RememberMeAuthenticationException {
if (cookieTokens.length != 3) {
throw new InvalidCookieException("deliberate exception");
}
UserDetails user = getUserDetailsService().loadUserByUsername("joe");
return user;
}
}
public static
|
MockRememberMeServices
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/util/Throwables_getRootCause_Test.java
|
{
"start": 844,
"end": 1574
}
|
class ____ {
@Test
void should_return_null_if_throwable_has_no_cause() {
assertThat(Throwables.getRootCause(new Throwable())).isNull();
}
@Test
void should_return_cause_when_throwable_has_cause() {
IllegalArgumentException expectedCause = new IllegalArgumentException();
assertThat(Throwables.getRootCause(new Throwable(expectedCause))).isSameAs(expectedCause);
}
@Test
void should_return_root_cause_when_throwable_has_cause_which_has_cause() {
NullPointerException expectedCause = new NullPointerException();
Throwable error = new Throwable(new IllegalArgumentException(expectedCause));
assertThat(Throwables.getRootCause(error)).isSameAs(expectedCause);
}
}
|
Throwables_getRootCause_Test
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamProcessorApiTest.java
|
{
"start": 1969,
"end": 4174
}
|
class ____ {
@Test
void shouldGetStateStoreWithConnectedStoreProvider() {
runTest(false);
}
@Test
void shouldGetStateStoreWithStreamBuilder() {
runTest(true);
}
private void runTest(final boolean shouldAddStoreDirectly) {
final StreamsBuilder builder = new StreamsBuilder();
final StoreBuilder<?> storeBuilder = Stores.keyValueStoreBuilder(Stores.inMemoryKeyValueStore("store"), Serdes.String(), Serdes.String());
if (shouldAddStoreDirectly) {
builder.addStateStore(storeBuilder);
}
builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()))
.processValues(new TransformerSupplier(shouldAddStoreDirectly ? null : storeBuilder), "store")
.to("output", Produced.with(Serdes.String(), Serdes.String()));
final List<KeyValue<String, String>> words = Arrays.asList(KeyValue.pair("a", "foo"), KeyValue.pair("b", "bar"), KeyValue.pair("c", "baz"));
try (TopologyTestDriver testDriver = new TopologyTestDriver(builder.build())) {
final TestInputTopic<String, String>
testDriverInputTopic =
testDriver.createInputTopic("input", Serdes.String().serializer(), Serdes.String().serializer());
words.forEach(clk -> testDriverInputTopic.pipeInput(clk.key, clk.value));
final List<String> expectedOutput = asList("fooUpdated", "barUpdated", "bazUpdated");
final Deserializer<String> keyDeserializer = Serdes.String().deserializer();
final List<String> actualOutput =
new ArrayList<>(testDriver.createOutputTopic("output", keyDeserializer, Serdes.String().deserializer()).readValuesToList());
final KeyValueStore<String, String> stateStore = testDriver.getKeyValueStore("store");
Assertions.assertEquals(expectedOutput, actualOutput);
Assertions.assertEquals("fooUpdated", stateStore.get("a"));
Assertions.assertEquals("barUpdated", stateStore.get("b"));
Assertions.assertEquals("bazUpdated", stateStore.get("c"));
}
}
private static
|
KStreamProcessorApiTest
|
java
|
grpc__grpc-java
|
core/src/testFixtures/java/io/grpc/internal/TestUtils.java
|
{
"start": 2533,
"end": 5413
}
|
class ____ {
/**
* A mock transport created by the mock transport factory.
*/
final ConnectionClientTransport transport;
/**
* The listener passed to the start() of the mock transport.
*/
final ManagedClientTransport.Listener listener;
MockClientTransportInfo(ConnectionClientTransport transport,
ManagedClientTransport.Listener listener) {
this.transport = transport;
this.listener = listener;
}
}
/**
* Stub the given mock {@link ClientTransportFactory} by returning mock
* {@link ManagedClientTransport}s which saves their listeners along with them. This method
* returns a list of {@link MockClientTransportInfo}, each of which is a started mock transport
* and its listener.
*/
static BlockingQueue<MockClientTransportInfo> captureTransports(
ClientTransportFactory mockTransportFactory) {
return captureTransports(mockTransportFactory, null);
}
static BlockingQueue<MockClientTransportInfo> captureTransports(
ClientTransportFactory mockTransportFactory, @Nullable final Runnable startRunnable) {
final BlockingQueue<MockClientTransportInfo> captor =
new LinkedBlockingQueue<>();
doAnswer(new Answer<ConnectionClientTransport>() {
@Override
public ConnectionClientTransport answer(InvocationOnMock invocation) throws Throwable {
final ConnectionClientTransport mockTransport = mock(ConnectionClientTransport.class);
when(mockTransport.getLogId())
.thenReturn(InternalLogId.allocate("mocktransport", /*details=*/ null));
when(mockTransport.newStream(
any(MethodDescriptor.class), any(Metadata.class), any(CallOptions.class),
ArgumentMatchers.<ClientStreamTracer[]>any()))
.thenReturn(mock(ClientStream.class));
// Save the listener
doAnswer(new Answer<Runnable>() {
@Override
public Runnable answer(InvocationOnMock invocation) throws Throwable {
captor.add(new MockClientTransportInfo(
mockTransport, (ManagedClientTransport.Listener) invocation.getArguments()[0]));
return startRunnable;
}
}).when(mockTransport).start(any(ManagedClientTransport.Listener.class));
return mockTransport;
}
}).when(mockTransportFactory)
.newClientTransport(
any(SocketAddress.class),
any(ClientTransportFactory.ClientTransportOptions.class),
any(ChannelLogger.class));
return captor;
}
@SuppressWarnings("ReferenceEquality")
public static EquivalentAddressGroup stripAttrs(EquivalentAddressGroup eag) {
if (eag.getAttributes() == Attributes.EMPTY) {
return eag;
}
return new EquivalentAddressGroup(eag.getAddresses());
}
private TestUtils() {
}
public static
|
MockClientTransportInfo
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/api/AssertIterableEqualsAssertionsTests.java
|
{
"start": 1204,
"end": 16974
}
|
class ____ {
@Test
void assertIterableEqualsEqualToSelf() {
List<Object> list = listOf("a", 'b', 1, 2);
assertIterableEquals(list, list);
assertIterableEquals(list, list, "message");
assertIterableEquals(list, list, () -> "message");
Set<Object> set = setOf("a", 'b', 1, 2);
assertIterableEquals(set, set);
}
@Test
void assertIterableEqualsEqualObjectsOfSameType() {
assertIterableEquals(listOf(), listOf());
assertIterableEquals(listOf("abc"), listOf("abc"));
assertIterableEquals(listOf("abc", 1, 2L, 3D), listOf("abc", 1, 2L, 3D));
assertIterableEquals(setOf(), setOf());
assertIterableEquals(setOf("abc"), setOf("abc"));
assertIterableEquals(setOf("abc", 1, 2L, 3D), setOf("abc", 1, 2L, 3D));
}
@Test
void assertIterableEqualsNestedIterables() {
assertIterableEquals(listOf(listOf(listOf())), listOf(listOf(listOf())));
assertIterableEquals(setOf(setOf(setOf())), setOf(setOf(setOf())));
}
@Test
void assertIterableEqualsNestedIterablesWithNull() {
assertIterableEquals(listOf(null, listOf(null, listOf(null, null)), null, listOf((List<Object>) null)),
listOf(null, listOf(null, listOf(null, null)), null, listOf((List<Object>) null)));
assertIterableEquals(setOf(null, setOf(null, setOf(null, null)), null, setOf((Set<Object>) null)),
setOf(null, setOf(null, setOf(null, null)), null, setOf((Set<Object>) null)));
}
@Test
void assertIterableEqualsNestedIterablesWithStrings() {
assertIterableEquals(listOf("a", listOf(listOf("b", listOf("c", "d"))), "e"),
listOf("a", listOf(listOf("b", listOf("c", "d"))), "e"));
assertIterableEquals(setOf("a", setOf(setOf("b", setOf("c", "d"))), "e"),
setOf("a", setOf(setOf("b", setOf("c", "d"))), "e"));
}
@Test
void assertIterableEqualsNestedIterablesWithIntegers() {
assertIterableEquals(listOf(listOf(1), listOf(2), listOf(listOf(3, listOf(4)))),
listOf(listOf(1), listOf(2), listOf(listOf(3, listOf(4)))));
assertIterableEquals(setOf(setOf(1), setOf(2), setOf(setOf(3, setOf(4)))),
setOf(setOf(1), setOf(2), setOf(setOf(3, setOf(4)))));
assertIterableEquals(listOf(listOf(1), listOf(listOf(1))), setOf(setOf(1), setOf(setOf(1))));
}
@Test
void assertIterableEqualsNestedIterablesWithDeeplyNestedObject() {
assertIterableEquals(listOf(listOf(listOf(listOf(listOf(listOf(listOf("abc"))))))),
listOf(listOf(listOf(listOf(listOf(listOf(listOf("abc"))))))));
assertIterableEquals(setOf(setOf(setOf(setOf(setOf(setOf(setOf("abc"))))))),
setOf(setOf(setOf(setOf(setOf(setOf(setOf("abc"))))))));
}
@Test
void assertIterableEqualsNestedIterablesWithNaN() {
assertIterableEquals(listOf(null, listOf(null, Double.NaN, listOf(Float.NaN, null, listOf()))),
listOf(null, listOf(null, Double.NaN, listOf(Float.NaN, null, listOf()))));
assertIterableEquals(setOf(null, setOf(null, Double.NaN, setOf(Float.NaN, null, setOf()))),
setOf(null, setOf(null, Double.NaN, setOf(Float.NaN, null, setOf()))));
}
@Test
void assertIterableEqualsNestedIterablesWithObjectsOfDifferentTypes() {
assertIterableEquals(listOf(new String("a"), Integer.valueOf(1), listOf(Double.parseDouble("1.1"), "b")),
listOf(new String("a"), Integer.valueOf(1), listOf(Double.parseDouble("1.1"), "b")));
assertIterableEquals(setOf(new String("a"), Integer.valueOf(1), setOf(Double.parseDouble("1.1"), "b")),
setOf(new String("a"), Integer.valueOf(1), setOf(Double.parseDouble("1.1"), "b")));
}
@Test
void assertIterableEqualsNestedIterablesOfMixedSubtypes() {
assertIterableEquals(
listOf(1, 2, listOf(3, setOf(4, 5), setOf(6L), listOf(listOf(setOf(7)))), setOf(8), listOf(setOf(9L))),
listOf(1, 2, listOf(3, setOf(4, 5), setOf(6L), listOf(listOf(setOf(7)))), setOf(8), listOf(setOf(9L))));
assertIterableEquals(
listOf("a", setOf('b', 'c'), setOf((int) 'd'), listOf(listOf(listOf("ef"), listOf(listOf("ghi"))))),
setOf("a", listOf('b', 'c'), listOf((int) 'd'), setOf(setOf(setOf("ef"), setOf(setOf("ghi"))))));
}
@Test
void assertIterableEqualsIterableVsNull() {
try {
assertIterableEquals(null, listOf("a", "b", 1, listOf()));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "expected iterable was <null>");
}
try {
assertIterableEquals(listOf('a', 1, new Object(), 10L), null);
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "actual iterable was <null>");
}
}
@Test
void assertIterableEqualsNestedIterableVsNull() {
try {
assertIterableEquals(listOf(listOf(), 1, "2", setOf('3', listOf((List<Object>) null))),
listOf(listOf(), 1, "2", setOf('3', listOf(listOf("4")))));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "expected iterable was <null> at index [3][1][0]");
}
try {
assertIterableEquals(setOf(1, 2, listOf(3, listOf("4", setOf(5, setOf(6)))), "7"),
setOf(1, 2, listOf(3, listOf("4", setOf(5, null))), "7"));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "actual iterable was <null> at index [2][1][1][1]");
}
}
@Test
void assertIterableEqualsIterableVsNullAndMessage() {
try {
assertIterableEquals(null, listOf('a', "b", 10, 20D), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "expected iterable was <null>");
}
try {
assertIterableEquals(listOf("hello", 42), null, "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "actual iterable was <null>");
}
}
@Test
void assertIterableEqualsNestedIterableVsNullAndMessage() {
try {
assertIterableEquals(listOf(1, listOf(2, 3, listOf(4, 5, listOf((List<Object>) null)))),
listOf(1, listOf(2, 3, listOf(4, 5, listOf(listOf(6))))), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "expected iterable was <null> at index [1][2][2][0]");
}
try {
assertIterableEquals(listOf(1, listOf(2, listOf(3, listOf(listOf(4))))),
listOf(1, listOf(2, listOf(3, listOf((List<Object>) null)))), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "actual iterable was <null> at index [1][1][1][0]");
}
}
@Test
void assertIterableEqualsIterableVsNullAndMessageSupplier() {
try {
assertIterableEquals(null, setOf(42, "42", listOf(42F), 42D), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "expected iterable was <null>");
}
try {
assertIterableEquals(listOf(listOf("a"), listOf()), null, () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "actual iterable was <null>");
}
}
@Test
void assertIterableEqualsNestedIterableVsNullAndMessageSupplier() {
try {
assertIterableEquals(listOf("1", "2", "3", listOf("4", listOf((List<Object>) null))),
listOf("1", "2", "3", listOf("4", listOf(listOf(5)))), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "expected iterable was <null> at index [3][1][0]");
}
try {
assertIterableEquals(setOf(1, 2, setOf("3", setOf('4', setOf(5, 6, setOf())))),
setOf(1, 2, setOf("3", setOf('4', setOf(5, 6, null)))), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "actual iterable was <null> at index [2][1][1][2]");
}
}
@Test
void assertIterableEqualsIterablesOfDifferentLength() {
try {
assertIterableEquals(listOf('a', "b", 'c'), listOf('a', "b", 'c', 1));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "iterable lengths differ, expected: <3> but was: <4>");
}
}
@Test
void assertIterableEqualsNestedIterablesOfDifferentLength() {
try {
assertIterableEquals(listOf("a", setOf("b", listOf("c", "d", setOf("e", 1, 2, 3)))),
listOf("a", setOf("b", listOf("c", "d", setOf("e", 1, 2, 3, 4, 5)))));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "iterable lengths differ at index [1][1][2], expected: <4> but was: <6>");
}
try {
assertIterableEquals(listOf(listOf(listOf(listOf(listOf(listOf(listOf('a'))))))),
listOf(listOf(listOf(listOf(listOf(listOf(listOf('a', 'b'))))))));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "iterable lengths differ at index [0][0][0][0][0][0], expected: <1> but was: <2>");
}
}
@Test
void assertIterableEqualsIterablesOfDifferentLengthAndMessage() {
try {
assertIterableEquals(setOf('a', 1), setOf('a', 1, new Object()), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable lengths differ, expected: <2> but was: <3>");
}
}
@Test
void assertIterableEqualsNestedIterablesOfDifferentLengthAndMessage() {
try {
assertIterableEquals(listOf('a', 1, listOf(2, 3)), listOf('a', 1, listOf(2, 3, 4, 5)), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable lengths differ at index [2], expected: <2> but was: <4>");
}
}
@Test
void assertIterableEqualsIterablesOfDifferentLengthAndMessageSupplier() {
try {
assertIterableEquals(setOf("a", "b", "c"), setOf("a", "b", "c", "d", "e", "f"), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable lengths differ, expected: <3> but was: <6>");
}
}
@Test
void assertIterableEqualsNestedIterablesOfDifferentLengthAndMessageSupplier() {
try {
assertIterableEquals(listOf("a", setOf(1, 2, 3, listOf(4.0, 5.1, 6.1), 7)),
listOf("a", setOf(1, 2, 3, listOf(4.0, 5.1, 6.1, 7.0), 8)), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable lengths differ at index [1][3], expected: <3> but was: <4>");
}
}
@Test
void assertIterableEqualsDifferentIterables() {
try {
assertIterableEquals(listOf(1L, "2", '3', 4, 5D), listOf(1L, "2", '9', 4, 5D));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "iterable contents differ at index [2], expected: <3> but was: <9>");
}
try {
assertIterableEquals(listOf("a", 10, 11, 12, Double.NaN), listOf("a", 10, 11, 12, 13.55D));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "iterable contents differ at index [4], expected: <NaN> but was: <13.55>");
}
}
@Test
void assertIterableEqualsDifferentNestedIterables() {
try {
assertIterableEquals(listOf(1, 2, listOf(3, listOf(4, listOf(false, true)))),
listOf(1, 2, listOf(3, listOf(4, listOf(true, false)))));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex,
"iterable contents differ at index [2][1][1][0], expected: <false> but was: <true>");
}
List<Object> differentElement = listOf();
try {
assertIterableEquals(listOf(1, 2, 3, listOf(listOf(4, listOf(5)))),
listOf(1, 2, 3, listOf(listOf(4, listOf(differentElement)))));
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex,
"iterable contents differ at index [3][0][1][0], expected: <5> but was: <" + differentElement + ">");
}
}
@Test
void assertIterableEqualsDifferentIterablesAndMessage() {
try {
assertIterableEquals(listOf(1.1D, 2L, "3"), listOf(1D, 2L, "3"), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable contents differ at index [0], expected: <1.1> but was: <1.0>");
}
}
@Test
void assertIterableEqualsDifferentNestedIterablesAndMessage() {
try {
assertIterableEquals(listOf(9, 8, '6', listOf(5, 4, "3", listOf("2", '1'))),
listOf(9, 8, '6', listOf(5, 4, "3", listOf("99", '1'))), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable contents differ at index [3][3][0], expected: <2> but was: <99>");
}
try {
assertIterableEquals(listOf(9, 8, '6', listOf(5, 4, "3", listOf("2", "1"))),
listOf(9, 8, '6', listOf(5, 4, "3", listOf("99", "1"))), "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable contents differ at index [3][3][0], expected: <2> but was: <99>");
}
}
@Test
void assertIterableEqualsDifferentIterablesAndMessageSupplier() {
try {
assertIterableEquals(setOf("one", 1L, Double.MIN_VALUE, "abc"), setOf("one", 1L, 42.42, "abc"),
() -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable contents differ at index [2], expected: <4.9E-324> but was: <42.42>");
}
}
@Test
void assertIterableEqualsDifferentNestedIterablesAndMessageSupplier() {
try {
assertIterableEquals(setOf("one", 1L, setOf("a", 'b', setOf(1, setOf(2, 3))), "abc"),
setOf("one", 1L, setOf("a", 'b', setOf(1, setOf(2, 4))), "abc"), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable contents differ at index [2][2][1][1], expected: <3> but was: <4>");
}
try {
assertIterableEquals(listOf("j", listOf("a"), setOf(42), "ab", setOf(1, listOf(3))),
listOf("j", listOf("a"), setOf(42), "ab", setOf(1, listOf(5))), () -> "message");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageStartsWith(ex, "message");
assertMessageEndsWith(ex, "iterable contents differ at index [4][1][0], expected: <3> but was: <5>");
}
}
@Test
// https://github.com/junit-team/junit-framework/issues/2157
void assertIterableEqualsWithListOfPath() {
var expected = listOf(Path.of("1"));
var actual = listOf(Path.of("1"));
assertDoesNotThrow(() -> assertIterableEquals(expected, actual));
}
@Test
void assertIterableEqualsThrowsStackOverflowErrorForInterlockedRecursiveStructures() {
var expected = new ArrayList<>();
var actual = new ArrayList<>();
actual.add(expected);
expected.add(actual);
assertThrows(StackOverflowError.class, () -> assertIterableEquals(expected, actual));
}
@Test
// https://github.com/junit-team/junit-framework/issues/2915
void assertIterableEqualsWithDifferentListOfPath() {
try {
var expected = listOf(Path.of("1").resolve("2"));
var actual = listOf(Path.of("1").resolve("3"));
assertIterableEquals(expected, actual);
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "iterable contents differ at index [0][1], expected: <2> but was: <3>");
}
}
}
|
AssertIterableEqualsAssertionsTests
|
java
|
jhy__jsoup
|
src/test/java/org/jsoup/parser/ParserSettingsTest.java
|
{
"start": 213,
"end": 1814
}
|
class ____ {
@MultiLocaleTest
public void caseSupport(Locale locale) {
Locale.setDefault(locale);
ParseSettings bothOn = new ParseSettings(true, true);
ParseSettings bothOff = new ParseSettings(false, false);
ParseSettings tagOn = new ParseSettings(true, false);
ParseSettings attrOn = new ParseSettings(false, true);
assertEquals("IMG", bothOn.normalizeTag("IMG"));
assertEquals("ID", bothOn.normalizeAttribute("ID"));
assertEquals("img", bothOff.normalizeTag("IMG"));
assertEquals("id", bothOff.normalizeAttribute("ID"));
assertEquals("IMG", tagOn.normalizeTag("IMG"));
assertEquals("id", tagOn.normalizeAttribute("ID"));
assertEquals("img", attrOn.normalizeTag("IMG"));
assertEquals("ID", attrOn.normalizeAttribute("ID"));
}
@MultiLocaleTest
public void attributeCaseNormalization(Locale locale) {
Locale.setDefault(locale);
ParseSettings parseSettings = new ParseSettings(false, false);
String normalizedAttribute = parseSettings.normalizeAttribute("HIDDEN");
assertEquals("hidden", normalizedAttribute);
}
@MultiLocaleTest
public void attributesCaseNormalization(Locale locale) {
Locale.setDefault(locale);
ParseSettings parseSettings = new ParseSettings(false, false);
Attributes attributes = new Attributes();
attributes.put("ITEM", "1");
parseSettings.normalizeAttributes(attributes);
assertEquals("item", attributes.asList().get(0).getKey());
}
}
|
ParserSettingsTest
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/telemetry/internals/MetricKeyable.java
|
{
"start": 1024,
"end": 1151
}
|
interface ____ {
/**
* @return The {@code MetricKey} for respective metric.
*/
MetricKey key();
}
|
MetricKeyable
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java
|
{
"start": 3350,
"end": 15022
}
|
class ____ extends UnifiedHighlighter {
private static boolean isKnnQuery(Query q) {
// TODO: bug https://github.com/elastic/elasticsearch/issues/136427
// AbstractKnnVectorQuery is not public, so we need to list concrete classes
// currently there is a bug in Lucene that causes things that use
// AbstractKnnVectorQuery to throw NPEs in weight matches highlighting
return q instanceof KnnScoreDocQuery
|| q instanceof RescoreKnnVectorQuery
|| q instanceof VectorSimilarityQuery
|| q instanceof ESKnnFloatVectorQuery
|| q instanceof ESKnnByteVectorQuery
|| q instanceof ESDiversifyingChildrenByteKnnVectorQuery
|| q instanceof ESDiversifyingChildrenFloatKnnVectorQuery;
}
public static final char MULTIVAL_SEP_CHAR = (char) 0;
private static final Snippet[] EMPTY_SNIPPET = new Snippet[0];
private final OffsetSource offsetSource;
private final String index;
private final String field;
private final Locale breakIteratorLocale;
private final int noMatchSize;
private final CustomFieldHighlighter fieldHighlighter;
private final int maxAnalyzedOffset;
private final QueryMaxAnalyzedOffset queryMaxAnalyzedOffset;
/**
* Creates a new instance of {@link CustomUnifiedHighlighter}
*
* @param builder the {@link UnifiedHighlighter.Builder} for the underlying highlighter.
* @param offsetSource the {@link OffsetSource} to used for offsets retrieval.
* @param breakIteratorLocale the {@link Locale} to use for dividing text into passages.
* If null {@link Locale#ROOT} is used.
* @param index the index we're highlighting, mostly used for error messages
* @param field the name of the field we're highlighting
* @param query the query we're highlighting
* @param noMatchSize The size of the text that should be returned when no highlighting can be performed.
* @param maxPassages the maximum number of passes to highlight
* @param maxAnalyzedOffset if the field is more than this long we'll refuse to use the ANALYZED
* offset source for it because it'd be super slow
* @param weightMatchesEnabled whether the {@link HighlightFlag#WEIGHT_MATCHES} should be enabled
*/
public CustomUnifiedHighlighter(
Builder builder,
OffsetSource offsetSource,
@Nullable Locale breakIteratorLocale,
String index,
String field,
Query query,
int noMatchSize,
int maxPassages,
int maxAnalyzedOffset,
QueryMaxAnalyzedOffset queryMaxAnalyzedOffset,
boolean requireFieldMatch,
boolean weightMatchesEnabled
) {
super(builder);
this.offsetSource = offsetSource;
this.breakIteratorLocale = breakIteratorLocale == null ? Locale.ROOT : breakIteratorLocale;
this.index = index;
this.field = field;
this.noMatchSize = noMatchSize;
this.maxAnalyzedOffset = maxAnalyzedOffset;
this.queryMaxAnalyzedOffset = queryMaxAnalyzedOffset;
if (weightMatchesEnabled == false || requireFieldMatch == false || weightMatchesUnsupported(query)) {
getFlags(field).remove(HighlightFlag.WEIGHT_MATCHES);
}
fieldHighlighter = (CustomFieldHighlighter) getFieldHighlighter(field, query, extractTerms(query), maxPassages);
}
/**
* Highlights the field value.
*/
public Snippet[] highlightField(LeafReader reader, int docId, CheckedSupplier<String, IOException> loadFieldValue) throws IOException {
if (fieldHighlighter.getFieldOffsetStrategy() == NoOpOffsetStrategy.INSTANCE && noMatchSize == 0) {
// If the query is such that there can't possibly be any matches then skip doing *everything*
return EMPTY_SNIPPET;
}
String fieldValue = loadFieldValue.get();
if (fieldValue == null) {
return null;
}
int fieldValueLength = fieldValue.length();
if ((queryMaxAnalyzedOffset == null || queryMaxAnalyzedOffset.getNotNull() > maxAnalyzedOffset)
&& (getOffsetSource(field) == OffsetSource.ANALYSIS)
&& (fieldValueLength > maxAnalyzedOffset)) {
throw new IllegalArgumentException(
"The length ["
+ fieldValueLength
+ "] of field ["
+ field
+ "] in doc["
+ docId
+ "]/index["
+ index
+ "] exceeds the ["
+ IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey()
+ "] limit ["
+ maxAnalyzedOffset
+ "]. To avoid this error, set "
+ "the query parameter ["
+ MAX_ANALYZED_OFFSET_FIELD
+ "] to a value less than index setting ["
+ maxAnalyzedOffset
+ "] and this will tolerate long field values by truncating them."
);
}
Snippet[] result = (Snippet[]) fieldHighlighter.highlightFieldForDoc(reader, docId, fieldValue);
return result == null ? EMPTY_SNIPPET : result;
}
public PassageFormatter getFormatter() {
return super.getFormatter(field);
}
@Override
protected FieldHighlighter newFieldHighlighter(
String field,
FieldOffsetStrategy fieldOffsetStrategy,
BreakIterator breakIterator,
PassageScorer passageScorer,
int maxPassages,
int maxNoHighlightPassages,
PassageFormatter passageFormatter,
Comparator<Passage> passageSortComparator
) {
return new CustomFieldHighlighter(
field,
fieldOffsetStrategy,
breakIteratorLocale,
breakIterator,
getScorer(field),
maxPassages,
(noMatchSize > 0 ? 1 : 0),
getFormatter(field),
passageSortComparator,
noMatchSize,
queryMaxAnalyzedOffset
);
}
@Override
protected Collection<Query> preSpanQueryRewrite(Query query) {
if (query instanceof MultiPhrasePrefixQuery mpq) {
Term[][] terms = mpq.getTerms();
int[] positions = mpq.getPositions();
SpanQuery[] positionSpanQueries = new SpanQuery[positions.length];
int sizeMinus1 = terms.length - 1;
for (int i = 0; i < positions.length; i++) {
SpanQuery[] innerQueries = new SpanQuery[terms[i].length];
for (int j = 0; j < terms[i].length; j++) {
if (i == sizeMinus1) {
innerQueries[j] = new SpanMultiTermQueryWrapper<>(new PrefixQuery(terms[i][j]));
} else {
innerQueries[j] = new SpanTermQuery(terms[i][j]);
}
}
if (innerQueries.length > 1) {
positionSpanQueries[i] = new SpanOrQuery(innerQueries);
} else {
positionSpanQueries[i] = innerQueries[0];
}
}
if (positionSpanQueries.length == 1) {
return Collections.singletonList(positionSpanQueries[0]);
}
// sum position increments beyond 1
int positionGaps = 0;
if (positions.length >= 2) {
// positions are in increasing order. max(0,...) is just a safeguard.
positionGaps = Math.max(0, positions[positions.length - 1] - positions[0] - positions.length + 1);
}
// if original slop is 0 then require inOrder
boolean inorder = (mpq.getSlop() == 0);
return Collections.singletonList(new SpanNearQuery(positionSpanQueries, mpq.getSlop() + positionGaps, inorder));
} else if (query instanceof ESToParentBlockJoinQuery) {
return Collections.singletonList(((ESToParentBlockJoinQuery) query).getChildQuery());
} else {
return null;
}
}
/**
* Forces the offset source for this highlighter
*/
@Override
protected OffsetSource getOffsetSource(String field) {
if (offsetSource == null) {
return super.getOffsetSource(field);
}
return offsetSource;
}
/**
 * Returns true if the provided {@link Query} is not compatible with the {@link HighlightFlag#WEIGHT_MATCHES}
 * mode of this highlighter.
 *
 * @param query The query to highlight
 */
private boolean weightMatchesUnsupported(Query query) {
    // Single-element array so the anonymous visitor can flip a flag from its callbacks.
    boolean[] hasUnknownLeaf = new boolean[1];
    query.visit(new QueryVisitor() {
        @Override
        public void visitLeaf(Query leafQuery) {
            /**
             * The parent-child query requires to load global ordinals and to access
             * documents outside of the scope of the highlighted doc.
             * We disable the {@link HighlightFlag#WEIGHT_MATCHES} mode in this case
             * in order to preserve the compatibility.
             */
            if (leafQuery.getClass().getSimpleName().equals("LateParsingQuery")) {
                hasUnknownLeaf[0] = true;
            }
            /**
             * KnnScoreDocQuery and RankDocsQuery requires the same reader that built the docs
             * When using {@link HighlightFlag#WEIGHT_MATCHES} different readers are used and isn't supported by this query
             * Additionally, kNN queries don't really work against MemoryIndex which is used in the matches API
             */
            if (leafQuery instanceof RankDocsQuery.TopQuery || isKnnQuery(leafQuery)) {
                hasUnknownLeaf[0] = true;
            }
            // BUGFIX: forward the leaf query, not the enclosing top-level 'query'.
            super.visitLeaf(leafQuery);
        }

        @Override
        public void consumeTerms(Query leafQuery, Term... terms) {
            if (leafQuery instanceof AbstractScriptFieldQuery) {
                /**
                 * Queries on runtime fields don't support the matches API.
                 * TODO: We should add the support for keyword runtime fields.
                 *
                 */
                hasUnknownLeaf[0] = true;
            }
            // BUGFIX: was super.consumeTerms(query, terms). QueryVisitor's default
            // consumeTerms re-dispatches to visitLeaf with its argument, so passing
            // the outer query would re-inspect the whole query instead of this leaf.
            super.consumeTerms(leafQuery, terms);
        }

        @Override
        public void consumeTermsMatching(Query leafQuery, String field, Supplier<ByteRunAutomaton> automaton) {
            if (leafQuery instanceof AbstractScriptFieldQuery) {
                /**
                 * Queries on runtime fields don't support the matches API.
                 * TODO: We should add the support for keyword runtime fields.
                 */
                hasUnknownLeaf[0] = true;
            }
            // BUGFIX: forward the leaf query rather than the enclosing query (see consumeTerms).
            super.consumeTermsMatching(leafQuery, field, automaton);
        }

        @Override
        public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) {
            /**
             * Nested queries don't support the matches API.
             */
            if (parent instanceof ESToParentBlockJoinQuery) {
                hasUnknownLeaf[0] = true;
            }
            // we want to visit all queries, including those within the must_not clauses.
            return this;
        }
    });
    return hasUnknownLeaf[0];
}
}
|
CustomUnifiedHighlighter
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
|
{
"start": 22536,
"end": 23014
}
|
class ____ {
private AllocateResponse response;
public AllocateResponseLock(AllocateResponse response) {
this.response = response;
}
public synchronized AllocateResponse getAllocateResponse() {
return response;
}
public synchronized void setAllocateResponse(AllocateResponse response) {
this.response = response;
}
}
// Exposes the underlying RPC server; intended for test inspection only.
@VisibleForTesting
public Server getServer() {
    return this.server;
}
}
|
AllocateResponseLock
|
java
|
apache__kafka
|
metadata/src/test/java/org/apache/kafka/controller/metrics/ControllerMetricsTestUtils.java
|
{
"start": 2159,
"end": 3637
}
|
// Leadership state a fabricated partition registration should simulate.
// (See fakePartitionRegistration: NORMAL -> leader 0, NON_PREFERRED_LEADER -> leader 1,
// OFFLINE -> leader -1.)
enum ____ {
    NORMAL,
    NON_PREFERRED_LEADER,
    OFFLINE
}
/**
 * Builds a three-replica partition registration whose leader reflects the
 * requested fake leadership state (0, 1, or -1 for offline).
 */
public static PartitionRegistration fakePartitionRegistration(
    FakePartitionRegistrationType type
) {
    final int leader;
    switch (type) {
        case NORMAL:
            leader = 0;
            break;
        case NON_PREFERRED_LEADER:
            leader = 1;
            break;
        case OFFLINE:
        default:
            leader = -1;
            break;
    }
    PartitionRegistration.Builder builder = new PartitionRegistration.Builder()
        .setReplicas(new int[] {0, 1, 2})
        .setDirectories(DirectoryId.migratingArray(3))
        .setIsr(new int[] {0, 1, 2})
        .setLeader(leader)
        .setLeaderRecoveryState(LeaderRecoveryState.RECOVERED)
        .setLeaderEpoch(100)
        .setPartitionEpoch(200);
    return builder.build();
}
/**
 * Builds a topic image whose partitions get consecutive ids starting at 0,
 * in the order the registrations were supplied.
 */
public static TopicImage fakeTopicImage(
    String topicName,
    Uuid topicId,
    PartitionRegistration... registrations
) {
    Map<Integer, PartitionRegistration> partitions = new HashMap<>();
    for (int partitionId = 0; partitionId < registrations.length; partitionId++) {
        partitions.put(partitionId, registrations[partitionId]);
    }
    return new TopicImage(topicName, topicId, partitions);
}
/**
 * Folds the given topic images into a TopicsImage, starting from the empty image.
 */
public static TopicsImage fakeTopicsImage(
    TopicImage... topics
) {
    TopicsImage result = TopicsImage.EMPTY;
    for (int i = 0; i < topics.length; i++) {
        result = result.including(topics[i]);
    }
    return result;
}
}
|
FakePartitionRegistrationType
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/type/classreading/MergedAnnotationMetadataVisitorTests.java
|
{
"start": 8081,
"end": 8272
}
|
// Fixture enum used as an annotation attribute value in the surrounding tests.
enum ____ {
    ONE, TWO, THREE
}
@AnnotationAnnotation(annotationValue = @NestedAnnotation("a"), annotationArrayValue = {
@NestedAnnotation("b"), @NestedAnnotation("c") })
static
|
ExampleEnum
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/annotations/BuildStep.java
|
{
"start": 2131,
"end": 2323
}
|
class ____ producer of this item. Alternatively items can be produced
* by simply returning them from the method.
* <p>
* If field injection is used then every {@code BuildStep} method on the
|
a
|
java
|
hibernate__hibernate-orm
|
local-build-plugins/src/main/java/org/hibernate/orm/properties/jdk17/SettingsCollector.java
|
{
"start": 1099,
"end": 3929
}
|
class ____ {
// Gradle-friendly overload: unwraps the Directory and delegates to the File-based variant.
public static Map<SettingsDocSection, SortedSet<SettingDescriptor>> collectSettingDescriptors(
    Directory javadocDirectory,
    Map<String, SettingsDocSection> sections,
    String publishedJavadocsUrl) {
    return collectSettingDescriptors( javadocDirectory.getAsFile(), sections, publishedJavadocsUrl );
}
public static Map<SettingsDocSection, SortedSet<SettingDescriptor>> collectSettingDescriptors(
File javadocDirectory,
Map<String, SettingsDocSection> sections,
String publishedJavadocsUrl) {
final Map<SettingsDocSection, SortedSet<SettingDescriptor>> result = Utils.createResultMap( sections );
// Load the constant-values.html file with Jsoup and start processing it
final Document constantValuesJson = loadConstants( javadocDirectory );
final Elements captionClassDivs = constantValuesJson.select( "div.caption" );
for ( Element captionClassDiv : captionClassDivs ) {
final String className = captionClassDiv.selectFirst( "span" ).text();
// find the doc section descriptor defined for this class, if one
final SettingsDocSection docSection = findMatchingDocSection( className, sections );
if ( docSection == null ) {
// does not match any defined sections, skip it
continue;
}
// find the summary-table div that contains the constant->value mappings for class-name
final Element captionClassDivParent = captionClassDiv.parent();
final Element tableDiv = captionClassDivParent.selectFirst( ".summary-table" );
final Elements constantFqnColumns = tableDiv.select( ".col-first" );
final Elements constantValueColumns = tableDiv.select( ".col-last" );
// extract the Javadoc elements for each field on class-name
final Map<String, Element> classFieldJavadocs = extractClassFieldJavadocs( className, javadocDirectory );
// todo (settings-doc) : consider extracting all @see tags and grabbing ones that refer to other "setting field"
// and ultimately render "cross links" - i.e. `@see JdbcSettings#JAKARTA_JDBC_URL`.
// these are contained as notes in the Javadoc.
// this would require a second pass though after all "setting details" have bee processed.
// for now, we don't need this
//final Map<String, SettingWorkingDetails> settingWorkingDetailsMap = new HashMap<>();
for ( int c = 0; c < constantFqnColumns.size(); c++ ) {
final Element constantFqnColumn = constantFqnColumns.get( c );
if ( constantFqnColumn.hasClass( "table-header" ) ) {
continue;
}
final String constantFqn = constantFqnColumn.selectFirst( "code" ).id();
final String constantValue = constantValueColumns.get( c ).selectFirst( "code" ).text();
// locate the field javadoc from `classFieldJavadocs`.
// that map is keyed by the simple name of the field, so strip the
// package and
|
SettingsCollector
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/EntityMapTest.java
|
{
"start": 1502,
"end": 1651
}
|
class ____ {
private Integer aId;
private B b1;
private B b2;
private C c1;
private C c2;
@MappedSuperclass
public static abstract
|
EntityMapTest
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/catalog/LanguageValidationResult.java
|
{
"start": 950,
"end": 1764
}
|
/**
 * Result of validating a language expression or predicate.
 * Holds the validated text, an optional error (full and short form) and the
 * character index where validation failed. Success is defined as "no error set".
 * (Name restored from the constructor: the placeholder '____' did not compile.)
 */
class LanguageValidationResult implements Serializable {
    /** The text that was validated; immutable for the lifetime of the result. */
    private final String text;
    private String error;
    private String shortError;
    private int index;

    public LanguageValidationResult(String text) {
        this.text = text;
    }

    public String getText() {
        return text;
    }

    /** @return true when no error has been recorded. */
    public boolean isSuccess() {
        return error == null;
    }

    public void setError(String error) {
        this.error = error;
    }

    public String getError() {
        return error;
    }

    /** @return a condensed form of the error, suitable for short diagnostics. */
    public String getShortError() {
        return shortError;
    }

    public void setShortError(String shortError) {
        this.shortError = shortError;
    }

    /** @return index into {@link #getText()} where the problem was found. */
    public int getIndex() {
        return index;
    }

    public void setIndex(int index) {
        this.index = index;
    }
}
|
LanguageValidationResult
|
java
|
quarkusio__quarkus
|
extensions/reactive-mysql-client/deployment/src/test/java/io/quarkus/reactive/mysql/client/CustomCredentialsProvider.java
|
{
"start": 258,
"end": 813
}
|
/**
 * Test credentials provider that always returns the fixed hibernate_orm_test
 * user/password pair, regardless of the requested provider name.
 * (Name restored from the Logger reference: the placeholder '____' did not compile.)
 */
class CustomCredentialsProvider implements CredentialsProvider {

    private static final Logger log = Logger.getLogger(CustomCredentialsProvider.class.getName());

    @Override
    public Map<String, String> getCredentials(String credentialsProviderName) {
        // credentialsProviderName is deliberately ignored: same credentials for every caller.
        Map<String, String> properties = new HashMap<>();
        properties.put(USER_PROPERTY_NAME, "hibernate_orm_test");
        properties.put(PASSWORD_PROPERTY_NAME, "hibernate_orm_test");
        log.info("credentials provider returning " + properties);
        return properties;
    }
}
|
CustomCredentialsProvider
|
java
|
apache__flink
|
flink-filesystems/flink-s3-fs-base/src/main/java/com/amazonaws/services/s3/model/transform/XmlResponsesSaxParser.java
|
{
"start": 44981,
"end": 45834
}
|
/**
 * SAX handler for the GetBucketLocation response. Captures the text of the
 * top-level LocationConstraint element; an empty element is normalized to null
 * (the S3 convention for the US standard region).
 * (Name taken from the surrounding parser's naming scheme for this response.)
 */
class BucketLocationHandler extends AbstractHandler {

    private String location = null;

    /**
     * @return the bucket's location, or null when the response carried an
     *         empty LocationConstraint.
     */
    public String getLocation() {
        return location;
    }

    @Override
    protected void doStartElement(String uri, String name, String qName, Attributes attrs) {}

    @Override
    protected void doEndElement(String uri, String name, String qName) {
        if (atTopLevel()) {
            if (name.equals("LocationConstraint")) {
                String elementText = getText();
                // Empty constraint means "default region" -> represented as null.
                if (elementText.length() == 0) {
                    location = null;
                } else {
                    location = elementText;
                }
            }
        }
    }
}
public static
|
BucketLocationHandler
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/rest/RestController.java
|
{
"start": 3624,
"end": 36727
}
|
class ____ implements HttpServerTransport.Dispatcher {
private static final Logger logger = LogManager.getLogger(RestController.class);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestController.class);
/**
* list of browser safelisted media types - not allowed on Content-Type header
* https://fetch.spec.whatwg.org/#cors-safelisted-request-header
*/
static final Set<String> SAFELISTED_MEDIA_TYPES = Set.of("application/x-www-form-urlencoded", "multipart/form-data", "text/plain");
static final String ELASTIC_PRODUCT_HTTP_HEADER = "X-elastic-product";
static final String ELASTIC_PRODUCT_HTTP_HEADER_VALUE = "Elasticsearch";
static final Set<String> RESERVED_PATHS = Set.of("/__elb_health__", "/__elb_health__/zk", "/_health", "/_health/zk");
private static final BytesReference FAVICON_RESPONSE;
public static final String STATUS_CODE_KEY = "es_rest_status_code";
public static final String HANDLER_NAME_KEY = "es_rest_handler_name";
public static final String REQUEST_METHOD_KEY = "es_rest_request_method";
public static final boolean ERROR_TRACE_DEFAULT = false;
static {
try (InputStream stream = RestController.class.getResourceAsStream("/config/favicon.ico")) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
Streams.copy(stream, out);
FAVICON_RESPONSE = new BytesArray(out.toByteArray());
} catch (IOException e) {
throw new AssertionError(e);
}
}
private final PathTrie<MethodHandlers> handlers = new PathTrie<>(RestUtils.REST_DECODER);
private final RestInterceptor interceptor;
private final NodeClient client;
private final CircuitBreakerService circuitBreakerService;
private final UsageService usageService;
private final Tracer tracer;
private final LongCounter requestsCounter;
// If true, the ServerlessScope annotations will be enforced
private final ServerlessApiProtections apiProtections;
public static final String METRIC_REQUESTS_TOTAL = "es.rest.requests.total";
/**
 * Creates the controller, wires telemetry (tracer + request counter), installs
 * the interceptor (or a pass-through one when null) and registers the built-in
 * favicon handler.
 */
public RestController(
    RestInterceptor restInterceptor,
    NodeClient client,
    CircuitBreakerService circuitBreakerService,
    UsageService usageService,
    TelemetryProvider telemetryProvider
) {
    this.usageService = usageService;
    this.tracer = telemetryProvider.getTracer();
    this.requestsCounter = telemetryProvider.getMeterRegistry()
        .registerLongCounter(METRIC_REQUESTS_TOTAL, "The total number of rest requests/responses processed", "unit");
    // null interceptor -> no-op: every request is allowed to proceed.
    if (restInterceptor == null) {
        restInterceptor = (request, channel, targetHandler, listener) -> listener.onResponse(Boolean.TRUE);
    }
    this.interceptor = restInterceptor;
    this.client = client;
    this.circuitBreakerService = circuitBreakerService;
    // Registered without usage tracking: favicon requests are not API usage.
    registerHandlerNoWrap(RestRequest.Method.GET, "/favicon.ico", RestApiVersion.current(), new RestFavIconHandler());
    // Starts disabled; callers can toggle via getApiProtections().
    this.apiProtections = new ServerlessApiProtections(false);
}
/** Returns the serverless API protections holder so callers can toggle enforcement. */
public ServerlessApiProtections getApiProtections() {
    return this.apiProtections;
}
/**
 * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request.
 *
 * @param method GET, POST, etc.
 * @param path Path to handle (e.g. "/{index}/{type}/_bulk")
 * @param version API version to handle (e.g. RestApiVersion.V_8)
 * @param handler The handler to actually execute
 * @param deprecationMessage The message to log and send as a header in the response
 * @param deprecationLevel The deprecation log level to use for the deprecation warning, either WARN or CRITICAL
 */
protected void registerAsDeprecatedHandler(
    RestRequest.Method method,
    String path,
    RestApiVersion version,
    RestHandler handler,
    String deprecationMessage,
    Level deprecationLevel
) {
    // The wrapper created below supplies the deprecation behavior; wrapping an
    // already-wrapped handler would duplicate the warning.
    assert (handler instanceof DeprecationRestHandler) == false;
    // Only register when the API version is still supported by this node.
    if (RestApiVersion.onOrAfter(RestApiVersion.minimumSupported()).test(version)) {
        registerHandler(
            method,
            path,
            version,
            new DeprecationRestHandler(
                handler,
                method,
                path,
                deprecationLevel,
                deprecationMessage,
                deprecationLogger,
                // true when registering under a non-current (compatibility) version
                // -- NOTE(review): confirm this flag's exact semantics in DeprecationRestHandler
                version != RestApiVersion.current()
            )
        );
    }
}
/**
 * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request, or when provided
 * with {@code replacedMethod} and {@code replacedPath}. Expected usage:
 * <pre><code>
 * // remove deprecation in next major release
 * controller.registerAsDeprecatedHandler(POST, "/_forcemerge", RestApiVersion.V_8, someHandler,
 *  POST, "/_optimize", RestApiVersion.V_7);
 * controller.registerAsDeprecatedHandler(POST, "/{index}/_forcemerge", RestApiVersion.V_8, someHandler,
 *  POST, "/{index}/_optimize", RestApiVersion.V_7);
 * </code></pre>
 * <p>
 * The registered REST handler ({@code method} with {@code path}) is a normal REST handler that is not deprecated and it is
 * replacing the deprecated REST handler ({@code replacedMethod} with {@code replacedPath}) that is using the <em>same</em>
 * {@code handler}.
 * <p>
 * Deprecated REST handlers without a direct replacement should be deprecated directly using {@link #registerAsDeprecatedHandler}
 * and a specific message.
 *
 * @param method GET, POST, etc.
 * @param path Path to handle (e.g. "/_forcemerge")
 * @param version API version to handle (e.g. RestApiVersion.V_8)
 * @param handler The handler to actually execute
 * @param replacedMethod GET, POST, etc.
 * @param replacedPath <em>Replaced</em> path to handle (e.g. "/_optimize")
 * @param replacedVersion <em>Replaced</em> API version to handle (e.g. RestApiVersion.V_7)
 */
protected void registerAsReplacedHandler(
    RestRequest.Method method,
    String path,
    RestApiVersion version,
    RestHandler handler,
    RestRequest.Method replacedMethod,
    String replacedPath,
    RestApiVersion replacedVersion,
    String replacedMessage,
    Level deprecationLevel
) {
    // New route registers normally; the replaced route becomes a deprecated alias
    // backed by the very same handler instance.
    registerHandler(method, path, version, handler);
    registerAsDeprecatedHandler(replacedMethod, replacedPath, replacedVersion, handler, replacedMessage, deprecationLevel);
}
/**
 * Registers a REST handler to be executed when one of the provided methods and path match the request.
 * BaseRestHandler instances are additionally tracked by the usage service.
 *
 * @param method GET, POST, etc.
 * @param path Path to handle (e.g. "/{index}/{type}/_bulk")
 * @param version API version to handle (e.g. RestApiVersion.V_8)
 * @param handler The handler to actually execute
 */
protected void registerHandler(RestRequest.Method method, String path, RestApiVersion version, RestHandler handler) {
    if (handler instanceof BaseRestHandler baseRestHandler) {
        usageService.addRestHandler(baseRestHandler);
    }
    registerHandlerNoWrap(method, path, version, handler);
}
// Core registration: validates the path/version/method and merges the handler into
// the path trie alongside any handlers already registered for the same path.
private void registerHandlerNoWrap(RestRequest.Method method, String path, RestApiVersion version, RestHandler handler) {
    // Only the current and minimum-supported (compatibility) API versions are registrable.
    assert RestApiVersion.minimumSupported() == version || RestApiVersion.current() == version
        : "REST API compatibility is only supported for version "
            + RestApiVersion.minimumSupported().major
            + " [method="
            + method
            + ", path="
            + path
            + ", handler="
            + handler.getClass().getCanonicalName()
            + "]";
    if (RESERVED_PATHS.contains(path)) {
        throw new IllegalArgumentException("path [" + path + "] is a reserved path and may not be registered");
    }
    // the HTTP OPTIONS method is treated internally, not by handlers, see {@code #handleNoHandlerFound}
    assert method != RestRequest.Method.OPTIONS : "There should be no handlers registered for the OPTIONS HTTP method";
    // Insert a fresh MethodHandlers for a new path, or add this method/version to the existing one.
    handlers.insertOrUpdate(
        path,
        new MethodHandlers(path).addMethod(method, version, handler),
        (handlers, ignoredHandler) -> handlers.addMethod(method, version, handler)
    );
}
// Registers a single route, dispatching on its metadata: replacement routes register
// both the new and the (deprecated) replaced path; deprecated routes register with a
// deprecation warning; plain routes register directly.
public void registerHandler(final Route route, final RestHandler handler) {
    if (route.hasReplacement()) {
        Route replaced = route.getReplacedRoute();
        registerAsReplacedHandler(
            route.getMethod(),
            route.getPath(),
            route.getRestApiVersion(),
            handler,
            replaced.getMethod(),
            replaced.getPath(),
            replaced.getRestApiVersion(),
            replaced.getDeprecationMessage(),
            replaced.getDeprecationLevel()
        );
    } else if (route.isDeprecated()) {
        registerAsDeprecatedHandler(
            route.getMethod(),
            route.getPath(),
            route.getRestApiVersion(),
            handler,
            route.getDeprecationMessage(),
            route.getDeprecationLevel()
        );
    } else {
        // it's just a normal route
        registerHandler(route.getMethod(), route.getPath(), route.getRestApiVersion(), handler);
    }
}
/**
 * Registers a REST handler with the controller. The REST handler declares the {@code method}
 * and {@code path} combinations.
 */
public void registerHandler(final RestHandler handler) {
    for (Route route : handler.routes()) {
        registerHandler(route, handler);
    }
}
// Entry point from the HTTP transport: stamps the product header on every response,
// then attempts dispatch; any failure is converted into an error response if possible.
@Override
public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) {
    threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE);
    try {
        tryAllHandlers(request, channel, threadContext);
    } catch (Exception e) {
        try {
            sendFailure(channel, e);
        } catch (Exception inner) {
            // Even the failure response failed; log both and give up on this request.
            inner.addSuppressed(e);
            logger.error(() -> "failed to send failure response for uri [" + request.uri() + "]", inner);
        }
    }
}
// Invoked when the transport could not even parse the request. Normalizes the cause
// into an Exception and answers 400 (or the underlying cause's status for header
// validation failures); falls back to a bare 500 if the response itself fails.
@Override
public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
    threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE);
    try {
        final Exception e;
        if (cause == null) {
            e = new ElasticsearchException("unknown cause");
        } else if (cause instanceof Exception) {
            e = (Exception) cause;
        } else {
            // Wrap Errors/Throwables so they can flow through the Exception-based response path.
            e = new ElasticsearchException(cause);
        }
        // unless it's a http headers validation error, we consider any exceptions encountered so far during request processing
        // to be a problem of invalid/malformed request (hence the RestStatus#BAD_REQEST (400) HTTP response code)
        if (e instanceof HttpHeadersValidationException) {
            sendFailure(channel, (Exception) e.getCause());
        } else {
            channel.sendResponse(new RestResponse(channel, BAD_REQUEST, e));
            recordRequestMetric(BAD_REQUEST, requestsCounter);
        }
    } catch (final IOException e) {
        if (cause != null) {
            e.addSuppressed(cause);
        }
        logger.warn("failed to send bad request response", e);
        channel.sendResponse(new RestResponse(INTERNAL_SERVER_ERROR, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY));
        recordRequestMetric(INTERNAL_SERVER_ERROR, requestsCounter);
    }
}
// Returns true when the FIRST handler registered for method/path (at the given API
// version) supports all requested query parameters and capabilities; false when no
// handler matches at all.
public boolean checkSupported(
    RestRequest.Method method,
    String path,
    Set<String> parameters,
    Set<String> capabilities,
    RestApiVersion restApiVersion
) {
    Iterator<MethodHandlers> allHandlers = getAllHandlers(null, path);
    while (allHandlers.hasNext()) {
        RestHandler handler;
        MethodHandlers handlers = allHandlers.next();
        if (handlers == null) {
            handler = null;
        } else {
            handler = handlers.getHandler(method, restApiVersion);
        }
        if (handler != null) {
            var supportedParams = handler.supportedQueryParameters();
            // supportedQueryParameters() must return a stable (cached) instance.
            assert supportedParams == handler.supportedQueryParameters()
                : handler.getName() + ": did not return same instance from supportedQueryParameters()";
            // null supported params means "all parameters accepted".
            return (supportedParams == null || supportedParams.containsAll(parameters))
                && handler.supportedCapabilities().containsAll(capabilities);
        }
    }
    return false;
}
/**
 * Collects per-route HTTP stats for every registered path, sorted by path and
 * omitting routes that have never seen a request or response.
 */
@Override
public Map<String, HttpRouteStats> getStats() {
    final SortedMap<String, HttpRouteStats> statsByPath = new TreeMap<>();
    final Iterator<MethodHandlers> methodHandlersIterator = handlers.allNodeValues();
    while (methodHandlersIterator.hasNext()) {
        final MethodHandlers methodHandlers = methodHandlersIterator.next();
        final HttpRouteStats routeStats = methodHandlers.getStats();
        if (routeStats.requestCount() > 0 || routeStats.responseCount() > 0) {
            statsByPath.put(methodHandlers.getPath(), routeStats);
        }
    }
    return Collections.unmodifiableSortedMap(statsByPath);
}
// Handlers that can consume a streamed body get the request as-is; for all others the
// content is fully aggregated first and the aggregated request is dispatched.
private void maybeAggregateAndDispatchRequest(
    RestRequest restRequest,
    RestChannel restChannel,
    RestHandler handler,
    MethodHandlers methodHandlers,
    ThreadContext threadContext
) throws Exception {
    if (handler.supportsContentStream()) {
        dispatchRequest(restRequest, restChannel, handler, methodHandlers, threadContext);
    } else {
        RestContentAggregator.aggregate(restRequest, (aggregatedRequest) -> {
            try {
                dispatchRequest(aggregatedRequest, restChannel, handler, methodHandlers, threadContext);
            } catch (Exception e) {
                // The aggregation callback cannot throw checked exceptions; wrap and rethrow.
                throw new ElasticsearchException(e);
            }
        });
    }
}
// Dispatches an individual request to its resolved handler, enforcing content-type
// checks, serverless scope restrictions, the in-flight circuit breaker, system-index
// access headers, and the registered interceptor.
private void dispatchRequest(
    RestRequest request,
    RestChannel channel,
    RestHandler handler,
    MethodHandlers methodHandlers,
    ThreadContext threadContext
) throws Exception {
    if (request.hasContent()) {
        // Reject browser-safelisted or handler-incompatible content types up front (CSRF protection).
        if (isContentTypeDisallowed(request) || handler.mediaTypesValid(request) == false) {
            sendContentTypeErrorMessage(request.getAllHeaderValues("Content-Type"), channel);
            return;
        }
    }
    RestChannel responseChannel = channel;
    if (apiProtections.isEnabled()) {
        Scope scope = handler.getServerlessScope();
        if (scope == null) {
            // Handlers without a serverless scope annotation are not reachable in serverless mode.
            handleServerlessRequestToProtectedResource(request.uri(), request.method(), responseChannel);
            return;
        }
    }
    // Only fully-buffered content is reserved against the in-flight breaker.
    final int contentLength = request.isFullContent() ? request.contentLength() : 0;
    try {
        if (handler.canTripCircuitBreaker()) {
            inFlightRequestsBreaker(circuitBreakerService).addEstimateBytesAndMaybeBreak(contentLength, "<http_request>");
        } else {
            inFlightRequestsBreaker(circuitBreakerService).addWithoutBreaking(contentLength);
        }
        // iff we could reserve bytes for the request we need to send the response also over this channel
        responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, methodHandlers);
        if (handler.allowSystemIndexAccessByDefault() == false) {
            // The ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an Elastic product and
            // therefore we should allow a subset of external system index access.
            // This header is intended for internal use only.
            final String prodOriginValue = request.header(Task.X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER);
            if (prodOriginValue != null) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
                threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, prodOriginValue);
            } else {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
            }
        } else {
            threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
        }
        if (apiProtections.isEnabled()) {
            // API protections are only enabled in serverless; therefore we can use this as an indicator to mark the
            // request as a serverless mode request here, so downstream handlers can use the marker
            request.markAsServerlessRequest();
            logger.trace("Marked request for uri [{}] as serverless request", request.uri());
        }
        final var finalChannel = responseChannel;
        // The interceptor may veto processing (processRequest == false) or fail the request.
        this.interceptor.intercept(request, responseChannel, handler.getConcreteRestHandler(), new ActionListener<>() {
            @Override
            public void onResponse(Boolean processRequest) {
                if (processRequest) {
                    try {
                        validateRequest(request, handler, client);
                        handler.handleRequest(request, finalChannel, client);
                    } catch (Exception e) {
                        onFailure(e);
                    }
                }
            }
            @Override
            public void onFailure(Exception e) {
                try {
                    sendFailure(finalChannel, e);
                } catch (IOException ex) {
                    logger.info("Failed to send error [{}] to HTTP client", ex.toString());
                }
            }
        });
    } catch (Exception e) {
        // Includes a tripped circuit breaker from the reservation above.
        sendFailure(responseChannel, e);
    }
}
/**
 * Validates that the request should be allowed. Throws an exception if the request should be rejected.
 * Intentionally a no-op here; subclasses override to add request-level checks.
 *
 * @throws ElasticsearchStatusException if the request must be rejected
 */
@SuppressWarnings("unused")
protected void validateRequest(RestRequest request, RestHandler handler, NodeClient client) throws ElasticsearchStatusException {}
/** Sends the exception as a REST error response and records its status in the request metric. */
private void sendFailure(RestChannel responseChannel, Exception e) throws IOException {
    final var failureResponse = new RestResponse(responseChannel, e);
    responseChannel.sendResponse(failureResponse);
    recordRequestMetric(failureResponse.status(), requestsCounter);
}
/**
 * in order to prevent CSRF we have to reject all media types that are from a browser safelist
 * see https://fetch.spec.whatwg.org/#cors-safelisted-request-header
 * see https://www.elastic.co/blog/strict-content-type-checking-for-elasticsearch-rest-requests
 * @param request the incoming request whose parsed content type is inspected
 */
private static boolean isContentTypeDisallowed(RestRequest request) {
    final var parsedContentType = request.getParsedContentType();
    if (parsedContentType == null) {
        return false;
    }
    return SAFELISTED_MEDIA_TYPES.contains(parsedContentType.mediaTypeWithoutParameters());
}
// Called when no handler matched the request's method. Answers OPTIONS requests
// directly, returns 405 when the path exists under other methods, and returns
// false when the request was not handled here at all.
private boolean handleNoHandlerFound(
    ThreadContext threadContext,
    String rawPath,
    RestRequest.Method method,
    String uri,
    RestChannel channel
) {
    // Get the map of matching handlers for a request, for the full set of HTTP methods.
    final Set<RestRequest.Method> validMethodSet = getValidHandlerMethodSet(rawPath);
    if (validMethodSet.contains(method) == false) {
        if (method == RestRequest.Method.OPTIONS) {
            startTrace(threadContext, channel);
            handleOptionsRequest(channel, validMethodSet);
            return true;
        }
        if (validMethodSet.isEmpty() == false) {
            // If an alternative handler for an explicit path is registered to a
            // different HTTP method than the one supplied - return a 405 Method
            // Not Allowed error.
            startTrace(threadContext, channel);
            handleUnsupportedHttpMethod(uri, method, channel, validMethodSet, null);
            return true;
        }
    }
    return false;
}
// Convenience overload: derive the trace path from the request itself.
private void startTrace(ThreadContext threadContext, RestChannel channel) {
    startTrace(threadContext, channel, null);
}
// Starts a trace span named "<METHOD> <path>", attaching normalized request headers,
// method, URL and HTTP flavour as span attributes.
private void startTrace(ThreadContext threadContext, RestChannel channel, String restPath) {
    final RestRequest req = channel.request();
    if (restPath == null) {
        restPath = req.path();
    }
    String method = null;
    try {
        method = req.method().name();
    } catch (IllegalArgumentException e) {
        // Invalid methods throw an exception
    }
    String name;
    if (method != null) {
        name = method + " " + restPath;
    } else {
        name = restPath;
    }
    final Map<String, Object> attributes = Maps.newMapWithExpectedSize(req.getHeaders().size() + 3);
    req.getHeaders().forEach((key, values) -> {
        // Attribute keys are lowercased with '-' replaced by '_' (e.g. http.request.headers.content_type).
        final String lowerKey = key.toLowerCase(Locale.ROOT).replace('-', '_');
        attributes.put("http.request.headers." + lowerKey, values == null ? "" : String.join("; ", values));
    });
    attributes.put("http.method", Objects.requireNonNullElse(method, "<unknown>"));
    attributes.put("http.url", Objects.requireNonNullElse(req.uri(), "<unknown>"));
    switch (req.getHttpRequest().protocolVersion()) {
        case HTTP_1_0 -> attributes.put("http.flavour", "1.0");
        case HTTP_1_1 -> attributes.put("http.flavour", "1.1");
    }
    tracer.startTrace(threadContext, channel.request(), name, attributes);
}
// Records the exception on the trace span associated with this request.
private void traceException(RestChannel channel, Throwable e) {
    this.tracer.addError(channel.request(), e);
}
/**
 * Replies 406 Not Acceptable with a message describing the missing or
 * unsupported Content-Type header.
 */
private static void sendContentTypeErrorMessage(@Nullable List<String> contentTypeHeader, RestChannel channel) throws IOException {
    final String errorMessage = contentTypeHeader == null
        ? "Content-Type header is missing"
        : "Content-Type header [" + Strings.collectionToCommaDelimitedString(contentTypeHeader) + "] is not supported";
    channel.sendResponse(RestResponse.createSimpleErrorResponse(channel, NOT_ACCEPTABLE, errorMessage));
}
// Core dispatch loop: validates error_trace, then walks every handler candidate for
// the raw path, dispatching to the first one that matches the method/version or
// falling back to OPTIONS/405/400 handling.
private void tryAllHandlers(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) throws Exception {
    try {
        validateErrorTrace(request, channel);
    } catch (IllegalArgumentException e) {
        // error_trace requested while detailed errors are disabled -> 400.
        startTrace(threadContext, channel);
        channel.sendResponse(RestResponse.createSimpleErrorResponse(channel, BAD_REQUEST, e.getMessage()));
        recordRequestMetric(BAD_REQUEST, requestsCounter);
        return;
    }
    final String rawPath = request.rawPath();
    final String uri = request.uri();
    final RestRequest.Method requestMethod;
    RestApiVersion restApiVersion = request.getRestApiVersion();
    try {
        // Resolves the HTTP method and fails if the method is invalid
        requestMethod = request.method();
        // Loop through all possible handlers, attempting to dispatch the request
        Iterator<MethodHandlers> allHandlers = getAllHandlers(request.params(), rawPath);
        while (allHandlers.hasNext()) {
            final RestHandler handler;
            final MethodHandlers handlers = allHandlers.next();
            if (handlers == null) {
                handler = null;
            } else {
                handler = handlers.getHandler(requestMethod, restApiVersion);
            }
            if (handler == null) {
                // Path candidate exists but not for this method: may answer OPTIONS/405 and stop.
                if (handleNoHandlerFound(threadContext, rawPath, requestMethod, uri, channel)) {
                    return;
                }
            } else {
                startTrace(threadContext, channel, handlers.getPath());
                // Wrap the channel so the response status is recorded in the request metric.
                var decoratedChannel = new MeteringRestChannelDecorator(channel, requestsCounter, handler.getConcreteRestHandler());
                maybeAggregateAndDispatchRequest(request, decoratedChannel, handler, handlers, threadContext);
                return;
            }
        }
    } catch (final IllegalArgumentException e) {
        // Thrown by request.method() for invalid HTTP methods -> 405 with the allowed set.
        startTrace(threadContext, channel);
        traceException(channel, e);
        handleUnsupportedHttpMethod(uri, null, channel, getValidHandlerMethodSet(rawPath), e);
        return;
    }
    // If request has not been handled, fallback to a bad request error.
    startTrace(threadContext, channel);
    handleBadRequest(uri, requestMethod, channel);
}
// Rejects error_trace=true when the channel has detailed errors disabled.
// The parameter is consumed unconditionally so it never shows up as "unused".
private static void validateErrorTrace(RestRequest request, RestChannel channel) {
    // error_trace cannot be used when we disable detailed errors
    // we consume the error_trace parameter first to ensure that it is always consumed
    if (request.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && channel.detailedErrorsEnabled() == false) {
        throw new IllegalArgumentException("error traces in responses are disabled.");
    }
}
// Returns all handler candidates for rawPath; entries may be null (callers must check).
// When request params are supplied, they are reset to their original values between
// candidates because PathTrie resolution mutates them.
Iterator<MethodHandlers> getAllHandlers(@Nullable Map<String, String> requestParamsRef, String rawPath) {
    final Supplier<Map<String, String>> paramsSupplier;
    if (requestParamsRef == null) {
        paramsSupplier = () -> null;
    } else {
        // Between retrieving the correct path, we need to reset the parameters,
        // otherwise parameters are parsed out of the URI that aren't actually handled.
        final Map<String, String> originalParams = Map.copyOf(requestParamsRef);
        paramsSupplier = () -> {
            // PathTrie modifies the request, so reset the params between each iteration
            requestParamsRef.clear();
            requestParamsRef.putAll(originalParams);
            return requestParamsRef;
        };
    }
    // we use rawPath since we don't want to decode it while processing the path resolution
    // so we can handle things like:
    // my_index/my_type/http%3A%2F%2Fwww.google.com
    return handlers.retrieveAll(rawPath, paramsSupplier).iterator();
}
/**
 * Returns the holder for search usage statistics, to be used to track search usage when parsing
 * incoming search requests from the relevant REST endpoints. This is exposed for plugins that
 * expose search functionalities which need to contribute to the search usage statistics.
 */
public SearchUsageHolder getSearchUsageHolder() {
    // Simple delegation; the usage service owns the holder.
    return usageService.getSearchUsageHolder();
}
/**
 * Handle requests to a valid REST endpoint using an unsupported HTTP
 * method. A 405 HTTP response code is returned, and the response 'Allow'
 * header includes a list of valid HTTP methods for the endpoint (see
 * <a href="https://tools.ietf.org/html/rfc2616#section-10.4.6">HTTP/1.1 -
 * 10.4.6 - 405 Method Not Allowed</a>).
 *
 * @param uri            the request uri, included in the error message
 * @param method         the unsupported method, may be {@code null} when it could not be determined
 * @param channel        the channel to respond on
 * @param validMethodSet the methods that ARE supported for this path, advertised via the 'Allow' header
 * @param exception      optional failure whose message replaces the generated one
 */
private void handleUnsupportedHttpMethod(
    String uri,
    @Nullable RestRequest.Method method,
    final RestChannel channel,
    final Set<RestRequest.Method> validMethodSet,
    @Nullable final IllegalArgumentException exception
) {
    try {
        final StringBuilder msg = new StringBuilder();
        if (exception == null) {
            msg.append("Incorrect HTTP method for uri [").append(uri);
            msg.append("] and method [").append(method).append("]");
        } else {
            msg.append(exception.getMessage());
        }
        if (validMethodSet.isEmpty() == false) {
            msg.append(", allowed: ").append(validMethodSet);
        }
        RestResponse restResponse = RestResponse.createSimpleErrorResponse(channel, METHOD_NOT_ALLOWED, msg.toString());
        if (validMethodSet.isEmpty() == false) {
            restResponse.addHeader("Allow", Strings.collectionToDelimitedString(validMethodSet, ","));
        }
        channel.sendResponse(restResponse);
        recordRequestMetric(METHOD_NOT_ALLOWED, requestsCounter);
    } catch (final IOException e) {
        // Building the error body failed; fall back to an empty 500 so the client still
        // receives a response. (Message fixed: this handler sends a 405, not a 400.)
        logger.warn("failed to send method-not-allowed response", e);
        channel.sendResponse(new RestResponse(INTERNAL_SERVER_ERROR, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY));
        recordRequestMetric(INTERNAL_SERVER_ERROR, requestsCounter);
    }
}
/**
 * Handle HTTP OPTIONS requests to a valid REST endpoint. A 200 HTTP
 * response code is returned, and the response 'Allow' header includes a
 * list of valid HTTP methods for the endpoint (see
 * <a href="https://tools.ietf.org/html/rfc2616#section-9.2">HTTP/1.1 - 9.2
 * - Options</a>).
 *
 * @param channel        the channel to respond on
 * @param validMethodSet the methods supported for the requested path; may be empty
 */
private void handleOptionsRequest(RestChannel channel, Set<RestRequest.Method> validMethodSet) {
    final RestResponse response = new RestResponse(OK, TEXT_CONTENT_TYPE, BytesArray.EMPTY);
    // With no valid handlers we still answer a plain OK by default (the Access Control
    // Origin header gets appended automatically); otherwise advertise the methods.
    if (validMethodSet.isEmpty() == false) {
        response.addHeader("Allow", Strings.collectionToDelimitedString(validMethodSet, ","));
    }
    channel.sendResponse(response);
    recordRequestMetric(OK, requestsCounter);
}
/**
 * Handle a request with no candidate handlers by returning a 400 Bad Request
 * error that names the offending uri and method.
 *
 * @throws IOException if building the error body fails
 */
private void handleBadRequest(String uri, RestRequest.Method method, RestChannel channel) throws IOException {
    try (XContentBuilder builder = channel.newErrorBuilder()) {
        builder.startObject();
        builder.field("error", "no handler found for uri [" + uri + "] and method [" + method + "]");
        builder.endObject();
        channel.sendResponse(new RestResponse(BAD_REQUEST, builder));
        recordRequestMetric(BAD_REQUEST, requestsCounter);
    }
}
/**
 * Rejects a request to an endpoint that exists but is not exposed in serverless mode,
 * replying with an {@link ApiNotAvailableException} failure.
 *
 * @throws IOException if sending the failure response fails
 */
private void handleServerlessRequestToProtectedResource(String uri, RestRequest.Method method, RestChannel channel) throws IOException {
    final String message = "uri [" + uri + "] with method [" + method + "] exists but is not available when running in serverless mode";
    sendFailure(channel, new ApiNotAvailableException(message));
}
/**
 * Collects every HTTP method for which a handler is registered on the given raw path.
 *
 * @param rawPath the undecoded request path
 * @return the set of supported methods; empty when no handler matches the path
 */
private Set<RestRequest.Method> getValidHandlerMethodSet(String rawPath) {
    final Set<RestRequest.Method> validMethods = EnumSet.noneOf(RestRequest.Method.class);
    // No params map is needed here — only the registered methods matter, not path params.
    getAllHandlers(null, rawPath).forEachRemaining(methodHandlers -> {
        if (methodHandlers != null) {
            validMethods.addAll(methodHandlers.getValidMethods());
        }
    });
    return validMethods;
}
/**
 * Increments the request counter with status code, handler name and request method
 * attributes. Metric recording must never break request handling, so any failure is
 * logged and swallowed.
 */
private static void recordRequestMetric(RestStatus statusCode, String handlerName, String requestMethod, LongCounter requestsCounter) {
    try {
        final Map<String, Object> attributes = Map.of(
            STATUS_CODE_KEY, statusCode.getStatus(),
            HANDLER_NAME_KEY, handlerName,
            REQUEST_METHOD_KEY, requestMethod
        );
        requestsCounter.incrementBy(1, attributes);
    } catch (Exception ex) {
        // Deliberately best-effort: a metrics failure must not fail the request.
        logger.error("Cannot track request status code", ex);
    }
}
/**
 * Increments the request counter with only the status code attribute. Metric recording
 * must never break request handling, so any failure is logged and swallowed.
 */
private static void recordRequestMetric(RestStatus statusCode, LongCounter requestsCounter) {
    try {
        requestsCounter.incrementBy(1, Map.of(STATUS_CODE_KEY, statusCode.getStatus()));
    } catch (Exception ex) {
        // Deliberately best-effort: a metrics failure must not fail the request.
        logger.error("Cannot track request status code", ex);
    }
}
private static
|
RestController
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/Aggregator.java
|
{
"start": 1849,
"end": 2309
}
|
interface ____<K, V, VAgg> {
/**
* Compute a new aggregate from the key and value of a record and the current aggregate of the same key.
*
* @param key
* the key of the record
* @param value
* the value of the record
* @param aggregate
* the current aggregate value
*
* @return the new aggregate value
*/
VAgg apply(final K key, final V value, final VAgg aggregate);
}
|
Aggregator
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/AggregateFutureStateFallbackAtomicHelperTest.java
|
{
"start": 1900,
"end": 2110
}
|
class ____ which make certain
* platform classes unavailable. Then we construct a test suite so we can run the normal FuturesTest
* test methods in these degenerate classloaders.
*/
@NullUnmarked
public
|
loaders
|
java
|
quarkusio__quarkus
|
extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/context/SafeVertxContextInterceptor.java
|
{
"start": 483,
"end": 1654
}
|
class ____ {
@Inject
Vertx vertx;
private final static Logger LOGGER = Logger.getLogger(SafeVertxContextInterceptor.class);
@AroundInvoke
public Object markTheContextSafe(ArcInvocationContext ic) throws Exception {
final io.vertx.core.Context current = vertx.getOrCreateContext();
if (VertxContextSafetyToggle.isExplicitlyMarkedAsSafe(current)) {
return ic.proceed();
}
var annotation = ic.findIterceptorBinding(SafeVertxContext.class);
boolean unsafe = VertxContextSafetyToggle.isExplicitlyMarkedAsUnsafe(current);
if (unsafe && annotation.force()) {
LOGGER.debugf("Force the duplicated context as `safe` while is was explicitly marked as `unsafe` in %s.%s",
ic.getMethod().getDeclaringClass().getName(), ic.getMethod().getName());
} else if (unsafe) {
throw new IllegalStateException(
"Unable to mark the context as safe, as the current context is explicitly marked as unsafe");
}
VertxContextSafetyToggle.setContextSafe(current, true);
return ic.proceed();
}
}
|
SafeVertxContextInterceptor
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/hash/FunnelsTest.java
|
{
"start": 1286,
"end": 6144
}
|
class ____ extends TestCase {
public void testForBytes() {
PrimitiveSink primitiveSink = mock(PrimitiveSink.class);
Funnels.byteArrayFunnel().funnel(new byte[] {4, 3, 2, 1}, primitiveSink);
verify(primitiveSink).putBytes(new byte[] {4, 3, 2, 1});
}
public void testForBytes_null() {
assertNullsThrowException(Funnels.byteArrayFunnel());
}
public void testForStrings() {
PrimitiveSink primitiveSink = mock(PrimitiveSink.class);
Funnels.unencodedCharsFunnel().funnel("test", primitiveSink);
verify(primitiveSink).putUnencodedChars("test");
}
public void testForStrings_null() {
assertNullsThrowException(Funnels.unencodedCharsFunnel());
}
public void testForStringsCharset() {
for (Charset charset : Charset.availableCharsets().values()) {
PrimitiveSink primitiveSink = mock(PrimitiveSink.class);
Funnels.stringFunnel(charset).funnel("test", primitiveSink);
verify(primitiveSink).putString("test", charset);
}
}
public void testForStringsCharset_null() {
for (Charset charset : Charset.availableCharsets().values()) {
assertNullsThrowException(Funnels.stringFunnel(charset));
}
}
public void testForInts() {
Integer value = 1234;
PrimitiveSink primitiveSink = mock(PrimitiveSink.class);
Funnels.integerFunnel().funnel(value, primitiveSink);
verify(primitiveSink).putInt(1234);
}
public void testForInts_null() {
assertNullsThrowException(Funnels.integerFunnel());
}
public void testForLongs() {
Long value = 1234L;
PrimitiveSink primitiveSink = mock(PrimitiveSink.class);
Funnels.longFunnel().funnel(value, primitiveSink);
verify(primitiveSink).putLong(1234);
}
public void testForLongs_null() {
assertNullsThrowException(Funnels.longFunnel());
}
public void testSequential() {
@SuppressWarnings({"unchecked", "DoNotMock"})
Funnel<Object> elementFunnel = mock(Funnel.class);
PrimitiveSink primitiveSink = mock(PrimitiveSink.class);
Funnel<Iterable<?>> sequential = Funnels.sequentialFunnel(elementFunnel);
sequential.funnel(Arrays.asList("foo", "bar", "baz", "quux"), primitiveSink);
InOrder inOrder = inOrder(elementFunnel);
inOrder.verify(elementFunnel).funnel("foo", primitiveSink);
inOrder.verify(elementFunnel).funnel("bar", primitiveSink);
inOrder.verify(elementFunnel).funnel("baz", primitiveSink);
inOrder.verify(elementFunnel).funnel("quux", primitiveSink);
}
private static void assertNullsThrowException(Funnel<?> funnel) {
PrimitiveSink primitiveSink =
new AbstractStreamingHasher(4, 4) {
@Override
protected HashCode makeHash() {
throw new UnsupportedOperationException();
}
@Override
protected void process(ByteBuffer bb) {
while (bb.hasRemaining()) {
bb.get();
}
}
};
try {
funnel.funnel(null, primitiveSink);
fail();
} catch (NullPointerException ok) {
}
}
public void testAsOutputStream() throws Exception {
PrimitiveSink sink = mock(PrimitiveSink.class);
OutputStream out = Funnels.asOutputStream(sink);
byte[] bytes = {1, 2, 3, 4};
out.write(255);
out.write(bytes);
out.write(bytes, 1, 2);
verify(sink).putByte((byte) 255);
verify(sink).putBytes(bytes);
verify(sink).putBytes(bytes, 1, 2);
}
public void testSerialization() {
assertSame(
Funnels.byteArrayFunnel(), SerializableTester.reserialize(Funnels.byteArrayFunnel()));
assertSame(Funnels.integerFunnel(), SerializableTester.reserialize(Funnels.integerFunnel()));
assertSame(Funnels.longFunnel(), SerializableTester.reserialize(Funnels.longFunnel()));
assertSame(
Funnels.unencodedCharsFunnel(),
SerializableTester.reserialize(Funnels.unencodedCharsFunnel()));
assertEquals(
Funnels.sequentialFunnel(Funnels.integerFunnel()),
SerializableTester.reserialize(Funnels.sequentialFunnel(Funnels.integerFunnel())));
assertEquals(
Funnels.stringFunnel(US_ASCII),
SerializableTester.reserialize(Funnels.stringFunnel(US_ASCII)));
}
public void testEquals() {
new EqualsTester()
.addEqualityGroup(Funnels.byteArrayFunnel())
.addEqualityGroup(Funnels.integerFunnel())
.addEqualityGroup(Funnels.longFunnel())
.addEqualityGroup(Funnels.unencodedCharsFunnel())
.addEqualityGroup(Funnels.stringFunnel(UTF_8))
.addEqualityGroup(Funnels.stringFunnel(US_ASCII))
.addEqualityGroup(
Funnels.sequentialFunnel(Funnels.integerFunnel()),
SerializableTester.reserialize(Funnels.sequentialFunnel(Funnels.integerFunnel())))
.addEqualityGroup(Funnels.sequentialFunnel(Funnels.longFunnel()))
.testEquals();
}
}
|
FunnelsTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java
|
{
"start": 20614,
"end": 21109
}
|
class ____ {
@Deprecated
@InlineMe(
replacement = "this.setTimeout(Duration.ZERO)",
imports = {"java.time.Duration"})
public void clearTimeout() {
setTimeout(Duration.ZERO);
}
public void setTimeout(Duration timeout) {}
}
""")
.expectUnchanged()
.addInputLines(
"Caller.java",
"""
public final
|
Client
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/HttpServerCodec.java
|
{
"start": 7188,
"end": 8108
}
|
class ____ extends HttpResponseEncoder {
private HttpMethod method;
@Override
protected void sanitizeHeadersBeforeEncode(HttpResponse msg, boolean isAlwaysEmpty) {
if (!isAlwaysEmpty && HttpMethod.CONNECT.equals(method)
&& msg.status().codeClass() == HttpStatusClass.SUCCESS) {
// Stripping Transfer-Encoding:
// See https://tools.ietf.org/html/rfc7230#section-3.3.1
msg.headers().remove(HttpHeaderNames.TRANSFER_ENCODING);
return;
}
super.sanitizeHeadersBeforeEncode(msg, isAlwaysEmpty);
}
@Override
protected boolean isContentAlwaysEmpty(@SuppressWarnings("unused") HttpResponse msg) {
method = queue.poll();
return HttpMethod.HEAD.equals(method) || super.isContentAlwaysEmpty(msg);
}
}
}
|
HttpServerResponseEncoder
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/BadImportTest.java
|
{
"start": 4697,
"end": 5159
}
|
class ____ {
Blah() {
of(); // Left unchanged, because this is invoking Test.Blah.of.
}
void of() {}
}
ImmutableList<?> list = ImmutableList.of();
}
""")
.doTest();
}
@Test
public void negative_static_noStaticImport() {
compilationTestHelper
.addSourceLines(
"in/Test.java",
"""
|
Blah
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/writing/LazyMapKeyProxyGenerator.java
|
{
"start": 4081,
"end": 4400
}
|
class ____.
XPropertySpec keepFieldTypeField =
XPropertySpecs.builder(KEEP_FIELD_TYPE_FIELD, lazyClassMapKeyClassName)
.addModifiers(STATIC)
.addAnnotation(XTypeNames.KEEP_FIELD_TYPE)
.build();
return ImmutableList.of(keepFieldTypeField, lazyClassKeyField);
}
}
|
loading
|
java
|
spring-projects__spring-security
|
buildSrc/src/test/java/io/spring/gradle/TestKit.java
|
{
"start": 907,
"end": 1808
}
|
class ____ {
final File buildDir;
public TestKit(File buildDir) {
this.buildDir = buildDir;
}
public File getRootDir() {
return buildDir;
}
public GradleRunner withProjectDir(File projectDir) throws IOException {
FileUtils.copyDirectory(projectDir, buildDir);
return GradleRunner.create()
.withProjectDir(buildDir)
.withPluginClasspath();
}
public GradleRunner withProjectResource(String projectResourceName) throws IOException, URISyntaxException {
ClassLoader classLoader = getClass().getClassLoader();
Enumeration<URL> resources = classLoader.getResources(projectResourceName);
if(!resources.hasMoreElements()) {
throw new IOException("Cannot find resource " + projectResourceName + " with " + classLoader);
}
URL resourceUrl = resources.nextElement();
File projectDir = Paths.get(resourceUrl.toURI()).toFile();
return withProjectDir(projectDir);
}
}
|
TestKit
|
java
|
spring-projects__spring-boot
|
cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/CommandRunner.java
|
{
"start": 1105,
"end": 1275
}
|
class ____ to run {@link Command}s.
*
* @author Phillip Webb
* @since 1.0.0
* @see #addCommand(Command)
* @see CommandRunner#runAndHandleErrors(String[])
*/
public
|
used
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java
|
{
"start": 13715,
"end": 14433
}
|
class ____ implements Reducer<Long, String,
Long, String> {
private MultipleOutputs mos;
public void configure(JobConf conf) {
mos = new MultipleOutputs(conf);
}
public void reduce(Long key, Iterator<String> values,
OutputCollector<Long, String> output,
Reporter reporter)
throws IOException {
while (values.hasNext()) {
String value = values.next();
if (!value.equals("b")) {
output.collect(key, value);
} else {
mos.getCollector("text", reporter).collect(key, "text");
}
}
}
public void close() throws IOException {
mos.close();
}
}
}
|
MOJavaSerDeReduce
|
java
|
apache__camel
|
components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpTcpServerConsumerManualAcknowledgementWithBridgeErrorHandlerInOnlyTest.java
|
{
"start": 1023,
"end": 3841
}
|
class ____
extends TcpServerConsumerAcknowledgementTestSupport {
@Override
protected boolean isBridgeErrorHandler() {
return true;
}
@Override
protected boolean isAutoAck() {
return false;
}
@Override
protected ExchangePattern exchangePattern() {
return ExchangePattern.InOnly;
}
@Test
public void testReceiveSingleMessage() throws Exception {
result.expectedBodiesReceived(TEST_MESSAGE);
complete.expectedBodiesReceived(TEST_MESSAGE);
receiveSingleMessage();
Exchange completeExchange = complete.getReceivedExchanges().get(0);
assertNull(completeExchange.getIn().getHeader(MllpConstants.MLLP_ACKNOWLEDGEMENT));
assertNull(completeExchange.getIn().getHeader(MllpConstants.MLLP_ACKNOWLEDGEMENT_STRING));
}
@Test
public void testUnparsableMessage() throws Exception {
final String testMessage = "MSH" + TEST_MESSAGE;
result.expectedBodiesReceived(testMessage);
complete.expectedMessageCount(1);
unparsableMessage(testMessage);
assertNull(result.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT),
"Should not have the exception in the exchange property");
assertNull(complete.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT),
"Should not have the exception in the exchange property");
}
@Test
public void testMessageWithEmptySegment() throws Exception {
final String testMessage = TEST_MESSAGE.replace("\rPID|", "\r\rPID|");
result.expectedBodiesReceived(testMessage);
complete.expectedMessageCount(1);
unparsableMessage(testMessage);
assertNull(result.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT),
"Should not have the exception in the exchange property");
assertNull(complete.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT),
"Should not have the exception in the exchange property");
}
@Test
public void testMessageWithEmbeddedNewlines() throws Exception {
final String testMessage = TEST_MESSAGE.replace("\rPID|", "\r\n\rPID|\n");
result.expectedBodiesReceived(testMessage);
complete.expectedMessageCount(1);
unparsableMessage(testMessage);
assertNull(result.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT),
"Should not have the exception in the exchange property");
assertNull(complete.getReceivedExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT),
"Should not have the exception in the exchange property");
}
}
|
MllpTcpServerConsumerManualAcknowledgementWithBridgeErrorHandlerInOnlyTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 197858,
"end": 198933
}
|
class ____ extends BooleanExpressionContext {
public TerminalNode NOT() { return getToken(EsqlBaseParser.NOT, 0); }
public BooleanExpressionContext booleanExpression() {
return getRuleContext(BooleanExpressionContext.class,0);
}
@SuppressWarnings("this-escape")
public LogicalNotContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalNot(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitLogicalNot(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitLogicalNot(this);
else return visitor.visitChildren(this);
}
}
@SuppressWarnings("CheckReturnValue")
public static
|
LogicalNotContext
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/layout/GelfLayoutTest.java
|
{
"start": 2361,
"end": 14241
}
|
class ____ {
static ConfigurationFactory configFactory = new BasicConfigurationFactory();
private static final String HOSTNAME = "TheHost";
private static final String KEY1 = "Key1";
private static final String KEY2 = "Key2";
private static final String LINE1 = "empty mdc";
private static final String LINE2 = "filled mdc";
private static final String LINE3 = "error message";
private static final String MDCKEY1 = "MdcKey1";
private static final String MDCKEY2 = "MdcKey2";
private static final String MDCVALUE1 = "MdcValue1";
private static final String MDCVALUE2 = "MdcValue2";
private static final String VALUE1 = "Value1";
@AfterAll
static void cleanupClass() {
ConfigurationFactory.removeConfigurationFactory(configFactory);
}
@BeforeAll
static void setupClass() {
ConfigurationFactory.setConfigurationFactory(configFactory);
final LoggerContext ctx = LoggerContext.getContext();
ctx.reconfigure();
}
LoggerContext ctx = LoggerContext.getContext();
Logger root = ctx.getRootLogger();
private void testCompressedLayout(
final CompressionType compressionType,
final boolean includeStacktrace,
final boolean includeThreadContext,
String host,
final boolean includeNullDelimiter,
final boolean includeNewLineDelimiter)
throws IOException {
for (final Appender appender : root.getAppenders().values()) {
root.removeAppender(appender);
}
// set up appenders
final GelfLayout layout = GelfLayout.newBuilder()
.setConfiguration(ctx.getConfiguration())
.setHost(host)
.setAdditionalFields(new KeyValuePair[] {
new KeyValuePair(KEY1, VALUE1), new KeyValuePair(KEY2, "${java:runtime}"),
})
.setCompressionType(compressionType)
.setCompressionThreshold(1024)
.setIncludeStacktrace(includeStacktrace)
.setIncludeThreadContext(includeThreadContext)
.setIncludeNullDelimiter(includeNullDelimiter)
.setIncludeNewLineDelimiter(includeNewLineDelimiter)
.build();
final ListAppender eventAppender = new ListAppender("Events", null, null, true, false);
final ListAppender rawAppender = new ListAppender("Raw", null, layout, true, true);
final ListAppender formattedAppender = new ListAppender("Formatted", null, layout, true, false);
final EncodingListAppender encodedAppender = new EncodingListAppender("Encoded", null, layout, false, true);
eventAppender.start();
rawAppender.start();
formattedAppender.start();
encodedAppender.start();
if (host == null) {
host = NetUtils.getLocalHostname();
}
final JavaLookup javaLookup = new JavaLookup();
// set appenders on root and set level to debug
root.addAppender(eventAppender);
root.addAppender(rawAppender);
root.addAppender(formattedAppender);
root.addAppender(encodedAppender);
root.setLevel(Level.DEBUG);
root.debug(LINE1);
ThreadContext.put(MDCKEY1, MDCVALUE1);
ThreadContext.put(MDCKEY2, MDCVALUE2);
root.info(LINE2);
final Exception exception = new RuntimeException("some error");
root.error(LINE3, exception);
formattedAppender.stop();
final List<LogEvent> events = eventAppender.getEvents();
final List<byte[]> raw = rawAppender.getData();
final List<String> messages = formattedAppender.getMessages();
final List<byte[]> raw2 = encodedAppender.getData();
final String threadName = Thread.currentThread().getName();
// @formatter:off
String message = messages.get(0);
if (includeNullDelimiter) {
assertThat(message.indexOf(Chars.NUL)).isEqualTo(message.length() - 1);
message = message.replace(Chars.NUL, Chars.LF);
}
assertJsonEquals(
"{" + "\"version\": \"1.1\","
+ "\"host\": \""
+ host + "\"," + "\"timestamp\": "
+ GelfLayout.formatTimestamp(events.get(0).getTimeMillis()) + "," + "\"level\": 7,"
+ "\"_thread\": \""
+ threadName + "\"," + "\"_logger\": \"\","
+ "\"short_message\": \""
+ LINE1 + "\"," + "\"_"
+ KEY1 + "\": \"" + VALUE1 + "\"," + "\"_"
+ KEY2 + "\": \"" + javaLookup.getRuntime() + "\"" + "}",
message);
message = messages.get(1);
if (includeNullDelimiter) {
assertThat(message.indexOf(Chars.NUL)).isEqualTo(message.length() - 1);
message = message.replace(Chars.NUL, Chars.LF);
}
assertJsonEquals(
"{" + "\"version\": \"1.1\","
+ "\"host\": \""
+ host + "\"," + "\"timestamp\": "
+ GelfLayout.formatTimestamp(events.get(1).getTimeMillis()) + "," + "\"level\": 6,"
+ "\"_thread\": \""
+ threadName + "\"," + "\"_logger\": \"\","
+ "\"short_message\": \""
+ LINE2 + "\","
+ (includeThreadContext
? "\"_" + MDCKEY1 + "\": \"" + MDCVALUE1 + "\"," + "\"_" + MDCKEY2 + "\": \""
+ MDCVALUE2 + "\","
: "")
+ "\"_"
+ KEY1 + "\": \"" + VALUE1 + "\"," + "\"_"
+ KEY2 + "\": \"" + javaLookup.getRuntime() + "\"" + "}",
message);
// @formatter:on
final byte[] compressed = raw.get(2);
final byte[] compressed2 = raw2.get(2);
final ByteArrayInputStream bais = new ByteArrayInputStream(compressed);
final ByteArrayInputStream bais2 = new ByteArrayInputStream(compressed2);
InputStream inflaterStream;
InputStream inflaterStream2;
switch (compressionType) {
case GZIP:
inflaterStream = new GZIPInputStream(bais);
inflaterStream2 = new GZIPInputStream(bais2);
break;
case ZLIB:
inflaterStream = new InflaterInputStream(bais);
inflaterStream2 = new InflaterInputStream(bais2);
break;
case OFF:
inflaterStream = bais;
inflaterStream2 = bais2;
break;
default:
throw new IllegalStateException("Missing test case clause");
}
final byte[] uncompressed = IOUtils.toByteArray(inflaterStream);
final byte[] uncompressed2 = IOUtils.toByteArray(inflaterStream2);
inflaterStream.close();
inflaterStream2.close();
String uncompressedString = new String(uncompressed, layout.getCharset());
String uncompressedString2 = new String(uncompressed2, layout.getCharset());
// @formatter:off
final String expected = "{" + "\"version\": \"1.1\","
+ "\"host\": \""
+ host + "\"," + "\"timestamp\": "
+ GelfLayout.formatTimestamp(events.get(2).getTimeMillis()) + "," + "\"level\": 3,"
+ "\"_thread\": \""
+ threadName + "\"," + "\"_logger\": \"\","
+ "\"short_message\": \""
+ LINE3 + "\"," + "\"full_message\": \""
+ String.valueOf(JsonStringEncoder.getInstance()
.quoteAsString(
includeStacktrace
? GelfLayout.formatThrowable(exception).toString()
: exception.toString()))
+ "\","
+ (includeThreadContext
? "\"_" + MDCKEY1 + "\": \"" + MDCVALUE1 + "\"," + "\"_" + MDCKEY2 + "\": \"" + MDCVALUE2
+ "\","
: "")
+ "\"_"
+ KEY1 + "\": \"" + VALUE1 + "\"," + "\"_"
+ KEY2 + "\": \"" + javaLookup.getRuntime() + "\"" + "}";
// @formatter:on
if (includeNullDelimiter) {
assertEquals(uncompressedString.indexOf(Chars.NUL), uncompressedString.length() - 1);
assertEquals(uncompressedString2.indexOf(Chars.NUL), uncompressedString2.length() - 1);
uncompressedString = uncompressedString.replace(Chars.NUL, Chars.LF);
uncompressedString2 = uncompressedString2.replace(Chars.NUL, Chars.LF);
}
if (includeNewLineDelimiter) {
assertEquals(uncompressedString.indexOf(Chars.LF), uncompressedString.length() - 1);
assertEquals(uncompressedString2.indexOf(Chars.LF), uncompressedString2.length() - 1);
}
assertJsonEquals(expected, uncompressedString);
assertJsonEquals(expected, uncompressedString2);
}
@Test
void testLayoutGzipCompression() throws Exception {
testCompressedLayout(CompressionType.GZIP, true, true, HOSTNAME, false, false);
}
@Test
void testLayoutNoCompression() throws Exception {
testCompressedLayout(CompressionType.OFF, true, true, HOSTNAME, false, false);
}
@Test
void testLayoutZlibCompression() throws Exception {
testCompressedLayout(CompressionType.ZLIB, true, true, HOSTNAME, false, false);
}
@Test
void testLayoutNoStacktrace() throws Exception {
testCompressedLayout(CompressionType.OFF, false, true, HOSTNAME, false, false);
}
@Test
void testLayoutNoThreadContext() throws Exception {
testCompressedLayout(CompressionType.OFF, true, false, HOSTNAME, false, false);
}
@Test
void testLayoutNoHost() throws Exception {
testCompressedLayout(CompressionType.OFF, true, true, null, false, false);
}
@Test
void testLayoutNullDelimiter() throws Exception {
testCompressedLayout(CompressionType.OFF, false, true, HOSTNAME, true, false);
}
@Test
void testLayoutNewLineDelimiter() throws Exception {
testCompressedLayout(CompressionType.OFF, true, true, HOSTNAME, false, true);
}
@Test
void testFormatTimestamp() {
assertEquals("0", GelfLayout.formatTimestamp(0L).toString());
assertEquals("1.000", GelfLayout.formatTimestamp(1000L).toString());
assertEquals("1.001", GelfLayout.formatTimestamp(1001L).toString());
assertEquals("1.010", GelfLayout.formatTimestamp(1010L).toString());
assertEquals("1.100", GelfLayout.formatTimestamp(1100L).toString());
assertEquals(
"1458741206.653", GelfLayout.formatTimestamp(1458741206653L).toString());
assertEquals(
"9223372036854775.807",
GelfLayout.formatTimestamp(Long.MAX_VALUE).toString());
}
private void testRequiresLocation(final String messagePattern, final Boolean requiresLocation) {
final GelfLayout layout =
GelfLayout.newBuilder().setMessagePattern(messagePattern).build();
assertEquals(layout.requiresLocation(), requiresLocation);
}
@Test
void testRequiresLocationPatternNotSet() {
testRequiresLocation(null, false);
}
@Test
void testRequiresLocationPatternNotContainsLocation() {
testRequiresLocation("%m %n", false);
}
@Test
void testRequiresLocationPatternContainsLocation() {
testRequiresLocation("%C %m %t", true);
}
}
|
GelfLayoutTest
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/ListConsumerGroupOffsetsSpec.java
|
{
"start": 1072,
"end": 2548
}
|
class ____ {
private Collection<TopicPartition> topicPartitions;
/**
* Set the topic partitions whose offsets are to be listed for a consumer group.
* {@code null} includes all topic partitions.
*
* @param topicPartitions List of topic partitions to include
* @return This ListConsumerGroupOffsetSpec
*/
public ListConsumerGroupOffsetsSpec topicPartitions(Collection<TopicPartition> topicPartitions) {
this.topicPartitions = topicPartitions;
return this;
}
/**
* Returns the topic partitions whose offsets are to be listed for a consumer group.
* {@code null} indicates that offsets of all partitions of the group are to be listed.
*/
public Collection<TopicPartition> topicPartitions() {
return topicPartitions;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ListConsumerGroupOffsetsSpec)) {
return false;
}
ListConsumerGroupOffsetsSpec that = (ListConsumerGroupOffsetsSpec) o;
return Objects.equals(topicPartitions, that.topicPartitions);
}
@Override
public int hashCode() {
return Objects.hash(topicPartitions);
}
@Override
public String toString() {
return "ListConsumerGroupOffsetsSpec(" +
"topicPartitions=" + topicPartitions +
')';
}
}
|
ListConsumerGroupOffsetsSpec
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/matchers/EqualityTest.java
|
{
"start": 375,
"end": 1313
}
|
class ____ extends TestBase {
@Test
public void shouldKnowIfObjectsAreEqual() throws Exception {
int[] arr = new int[] {1, 2};
assertTrue(areEqual(arr, arr));
assertTrue(areEqual(new int[] {1, 2}, new int[] {1, 2}));
assertTrue(areEqual(new Double[] {1.0}, new Double[] {1.0}));
assertTrue(areEqual(new String[0], new String[0]));
assertTrue(areEqual(new Object[10], new Object[10]));
assertTrue(areEqual(new int[] {1}, new Integer[] {1}));
assertTrue(areEqual(new Object[] {"1"}, new String[] {"1"}));
Object badequals = new BadEquals();
assertTrue(areEqual(badequals, badequals));
assertFalse(areEqual(new Object[9], new Object[10]));
assertFalse(areEqual(new int[] {1, 2}, new int[] {1}));
assertFalse(areEqual(new int[] {1}, new double[] {1.0}));
}
@SuppressWarnings("EqualsHashCode")
private final
|
EqualityTest
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/util/context/CoreContextTest.java
|
{
"start": 809,
"end": 3049
}
|
// NOTE(review): class name redacted ("____") in this extract; accompanying
// metadata identifies it as CoreContextTest. The tests verify how core
// Context implementations merge entries coming from foreign (non-core)
// Context implementations via putAll.
class ____ {
    // Merging a 3-entry core context with a 1-entry foreign context should
    // produce the fixed-size Context4 variant holding all four pairs.
    @Test
    public void putAllForeignSmallSize() {
        Context initial = Context.of(1, "A", 2, "B", 3, "C");
        Context other = new ContextTest.ForeignContext("staticKey", "staticValue");
        Context result = initial.putAll(other.readOnly());
        // result must remain a CoreContext, specifically the Context4 variant
        assertThat(result).isInstanceOf(CoreContext.class)
                .isInstanceOf(Context4.class);
        Context4 context4 = (Context4) result;
        // the three original pairs are preserved in slots 1-3...
        assertThat(context4.key1).as("key1").isEqualTo(1);
        assertThat(context4.value1).as("value1").isEqualTo("A");
        assertThat(context4.key2).as("key2").isEqualTo(2);
        assertThat(context4.value2).as("value2").isEqualTo("B");
        assertThat(context4.key3).as("key3").isEqualTo(3);
        assertThat(context4.value3).as("value3").isEqualTo("C");
        // ...and the foreign entry lands in the fourth slot
        assertThat(context4.key4).as("key4").isEqualTo("staticKey");
        assertThat(context4.value4).as("value4").isEqualTo("staticValue");
    }
    // Once the merged size exceeds the fixed-size variants, the array-backed
    // ContextN is expected; the foreign value replaces the duplicated key 1.
    @Test
    public void putAllForeignMiddleSize() {
        Context initial = Context.of(1, "value1", 2, "value2", 3, "value3", 4, "value4");
        ContextTest.ForeignContext other = new ContextTest.ForeignContext(1, "replaced")
                .directPut(5, "value5")
                .directPut(6, "value6");
        Context result = initial.putAll(other.readOnly());
        assertThat(result).isInstanceOf(ContextN.class);
        ContextN resultN = (ContextN) result;
        assertThat(resultN)
                .containsKeys(1, 2, 3, 4, 5)
                .containsValues("replaced", "value2", "value3", "value4", "value5");
    }
    // Two small core contexts whose union has exactly four distinct keys merge
    // into Context4; the right-hand context wins on the duplicate key 1.
    @Test
    public void mergeTwoSmallContextResultInContext4() {
        Context a = Context.of(1, "value1", 2, "value2");
        CoreContext b = (CoreContext) Context.of(1, "replaced", 3, "value3", 4, "value4");
        Context result = a.putAll(b.readOnly());
        assertThat(result).isInstanceOf(Context4.class);
        Context4 context4 = (Context4) result;
        assertThat(context4.key1).as("key1").isEqualTo(1);
        assertThat(context4.value1).as("value1").isEqualTo("replaced");
        assertThat(context4.key2).as("key2").isEqualTo(2);
        assertThat(context4.value2).as("value2").isEqualTo("value2");
        assertThat(context4.key3).as("key3").isEqualTo(3);
        assertThat(context4.value3).as("value3").isEqualTo("value3");
        assertThat(context4.key4).as("key4").isEqualTo(4);
        assertThat(context4.value4).as("value4").isEqualTo("value4");
    }
}
|
CoreContextTest
|
java
|
alibaba__nacos
|
core/src/test/java/com/alibaba/nacos/core/cluster/lookup/AddressServerMemberLookupTest.java
|
{
"start": 2268,
"end": 6494
}
|
// NOTE(review): class name redacted; metadata identifies it as
// AddressServerMemberLookupTest. Verifies the address-server based cluster
// member lookup: it builds the address-server URL from (mocked) environment
// properties, fetches the server list over a mocked NacosRestTemplate, and
// exposes diagnostic info.
class ____ {
    private final GenericType<String> genericType = new GenericType<String>() {
    };
    @Mock
    private NacosRestTemplate restTemplate;
    @Mock
    private ServerMemberManager memberManager;
    @Mock
    private HttpRestResult<String> result;
    private AddressServerMemberLookup addressServerMemberLookup;
    // URL fragments assembled in initAddressSys() and compared against info().
    private String addressUrl;
    private String envIdUrl;
    private String addressServerUrl;
    private String addressPort;
    private String domainName;
    @Mock
    private ConfigurableEnvironment environment;
    @BeforeEach
    void setUp() throws Exception {
        EnvUtil.setEnvironment(environment);
        // stub every property the lookup reads; values mirror the defaults
        when(environment.getProperty("maxHealthCheckFailCount", "12")).thenReturn("12");
        when(environment.getProperty("nacos.core.address-server.retry", Integer.class, 5)).thenReturn(5);
        when(environment.getProperty("address.server.domain", "jmenv.tbsite.net")).thenReturn("jmenv.tbsite.net");
        when(environment.getProperty("address.server.port", "8080")).thenReturn("8080");
        when(environment.getProperty(eq("address.server.url"), any(String.class))).thenReturn("/nacos/serverlist");
        when(environment.getProperty(Constants.WEB_CONTEXT_PATH)).thenReturn("/nacos");
        initAddressSys();
        // any GET on the computed address-server URL returns the stubbed result
        when(restTemplate.<String>get(eq(addressServerUrl), any(Header.EMPTY.getClass()), any(Query.EMPTY.getClass()),
                any(Type.class))).thenReturn(result);
        addressServerMemberLookup = new AddressServerMemberLookup();
        ReflectionTestUtils.setField(addressServerMemberLookup, "restTemplate", restTemplate);
        when(result.ok()).thenReturn(true);
        when(result.getData()).thenReturn("1.1.1.1:8848");
        addressServerMemberLookup.start();
    }
    @AfterEach
    void tearDown() throws NacosException {
        addressServerMemberLookup.destroy();
    }
    // Injecting the member manager triggers (at least) one server-list fetch.
    @Test
    void testMemberChange() throws Exception {
        addressServerMemberLookup.injectMemberManager(memberManager);
        verify(restTemplate).get(eq(addressServerUrl), any(Header.EMPTY.getClass()), any(Query.EMPTY.getClass()), any(Type.class));
    }
    // info() must expose exactly the four diagnostic keys with the URLs
    // computed from the same properties initAddressSys() used.
    @Test
    void testInfo() {
        Map<String, Object> infos = addressServerMemberLookup.info();
        assertEquals(4, infos.size());
        assertTrue(infos.containsKey("addressServerHealth"));
        assertTrue(infos.containsKey("addressServerUrl"));
        assertTrue(infos.containsKey("envIdUrl"));
        assertTrue(infos.containsKey("addressServerFailCount"));
        assertEquals(addressServerUrl, infos.get("addressServerUrl"));
        assertEquals(envIdUrl, infos.get("envIdUrl"));
    }
    // Direct GET through the mocked template returns the stubbed member list.
    @Test
    void testSyncFromAddressUrl() throws Exception {
        RestResult<String> result = restTemplate.get(addressServerUrl, Header.EMPTY, Query.EMPTY, genericType.getType());
        assertEquals("1.1.1.1:8848", result.getData());
    }
    // Mirrors the production URL-building logic: environment variables take
    // precedence over configuration properties for domain, port and path.
    private void initAddressSys() {
        String envDomainName = System.getenv("address_server_domain");
        if (StringUtils.isBlank(envDomainName)) {
            domainName = EnvUtil.getProperty("address.server.domain", "jmenv.tbsite.net");
        } else {
            domainName = envDomainName;
        }
        String envAddressPort = System.getenv("address_server_port");
        if (StringUtils.isBlank(envAddressPort)) {
            addressPort = EnvUtil.getProperty("address.server.port", "8080");
        } else {
            addressPort = envAddressPort;
        }
        String envAddressUrl = System.getenv("address_server_url");
        if (StringUtils.isBlank(envAddressUrl)) {
            addressUrl = EnvUtil.getProperty("address.server.url", EnvUtil.getContextPath() + "/" + "serverlist");
        } else {
            addressUrl = envAddressUrl;
        }
        addressServerUrl = HTTP_PREFIX + domainName + ":" + addressPort + addressUrl;
        envIdUrl = HTTP_PREFIX + domainName + ":" + addressPort + "/env";
        Loggers.CORE.info("ServerListService address-server port:" + addressPort);
        Loggers.CORE.info("ADDRESS_SERVER_URL:" + addressServerUrl);
    }
}
|
AddressServerMemberLookupTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/NullnessInferenceTest.java
|
{
"start": 20091,
"end": 20730
}
|
interface ____<T extends @NonNull Object> {
T get();
}
}
""")
.doTest();
}
@Test
public void defaultAnnotation() {
compilationHelper
.addSourceLines(
"DefaultAnnotationTest.java",
"package com.google.errorprone.dataflow.nullnesspropagation;",
"import static com.google.errorprone.dataflow.nullnesspropagation."
+ "NullnessInferenceTest.inspectInferredExpression;",
"import org.checkerframework.checker.nullness.qual.Nullable;",
"import org.checkerframework.checker.nullness.qual.NonNull;",
"public
|
NonNullElementCollection
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
|
{
"start": 5125,
"end": 5739
}
|
class ____ extends SecurityInfo {
@Override
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
return new TokenInfo() {
@Override
public Class<? extends Annotation> annotationType() {
return null;
}
@Override
public Class<? extends TokenSelector<? extends TokenIdentifier>>
value() {
return ClientToAMTokenSelector.class;
}
};
}
@Override
public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
return null;
}
};
private static
|
CustomSecurityInfo
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/util/JsonExpectationsHelper.java
|
{
"start": 842,
"end": 1124
}
|
class ____ the <a
* href="https://jsonassert.skyscreamer.org/">JSONassert</a> library.
*
* @author Sebastien Deleuze
* @since 4.1
* @deprecated in favor of using {@link JSONAssert} directly or the
* {@link JsonComparator} abstraction
*/
@Deprecated(since = "6.2")
public
|
requires
|
java
|
apache__maven
|
compat/maven-compat/src/test/java/org/apache/maven/artifact/installer/ArtifactInstallerTest.java
|
{
"start": 1292,
"end": 2194
}
|
// NOTE(review): class name redacted; metadata identifies it as
// ArtifactInstallerTest. Exercises ArtifactInstaller against a fixture jar.
class ____ extends AbstractArtifactComponentTestCase {
    @Inject
    private ArtifactInstaller artifactInstaller;
    @Inject
    private SessionScope sessionScope;
    // Role hint used by the base class to locate test repositories.
    protected String component() {
        return "installer";
    }
    // Installs a fixture jar into the local repository inside a seeded session
    // scope, then verifies the artifact is resolvable locally.
    @Test
    void testArtifactInstallation() throws Exception {
        sessionScope.enter();
        try {
            // the installer requires a MavenSession in scope; a mock suffices
            sessionScope.seed(MavenSession.class, mock(MavenSession.class));
            String artifactBasedir = new File(getBasedir(), "src/test/resources/artifact-install").getAbsolutePath();
            Artifact artifact = createArtifact("artifact", "1.0");
            File source = new File(artifactBasedir, "artifact-1.0.jar");
            artifactInstaller.install(source, artifact, localRepository());
            assertLocalArtifactPresent(artifact);
        } finally {
            // always leave the scope, even when installation fails
            sessionScope.exit();
        }
    }
}
|
ArtifactInstallerTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/BlockingWithFilterTest.java
|
{
"start": 841,
"end": 1708
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(TestFilter.class, TestResource.class);
}
});
@Test
public void requestFilterTest() {
String response = RestAssured.get("/test/request")
.then().statusCode(200).contentType("text/plain").extract().body().asString();
String[] parts = response.split("/");
assertEquals(2, parts.length);
assertEquals(parts[0], parts[1]);
assertFalse(parts[0].contains("eventloop"));
assertTrue(parts[0].contains("executor"));
}
public static
|
BlockingWithFilterTest
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/features/externalconfig/typesafeconfigurationproperties/constructorbinding/MyProperties.java
|
{
"start": 987,
"end": 1591
}
|
class ____ {
// @fold:on // fields...
private final boolean enabled;
private final InetAddress remoteAddress;
private final Security security;
// @fold:off
public MyProperties(boolean enabled, InetAddress remoteAddress, Security security) {
this.enabled = enabled;
this.remoteAddress = remoteAddress;
this.security = security;
}
// @fold:on // getters...
public boolean isEnabled() {
return this.enabled;
}
public InetAddress getRemoteAddress() {
return this.remoteAddress;
}
public Security getSecurity() {
return this.security;
}
// @fold:off
public static
|
MyProperties
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/simp/user/SimpSession.java
|
{
"start": 803,
"end": 1067
}
|
/**
 * Represents a session of a connected user in the simple-messaging user
 * registry, exposing its id, owning user, and current subscriptions.
 * NOTE(review): interface name redacted; metadata identifies it as SimpSession.
 */
interface ____ {
    /**
     * Return the session id.
     */
    String getId();
    /**
     * Return the user associated with the session.
     */
    SimpUser getUser();
    /**
     * Return the subscriptions for this session.
     */
    Set<SimpSubscription> getSubscriptions();
}
|
SimpSession
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-oauth2-authorization-server/src/test/java/smoketest/oauth2/server/SampleOAuth2AuthorizationServerApplicationTests.java
|
{
"start": 2202,
"end": 9043
}
|
// NOTE(review): class name redacted; metadata identifies it as
// SampleOAuth2AuthorizationServerApplicationTests. Smoke tests against a
// running authorization server: discovery endpoints, login redirects, and the
// client_credentials token endpoint under various Accept headers.
class ____ {
    private static final ParameterizedTypeReference<Map<String, Object>> MAP_TYPE_REFERENCE = new ParameterizedTypeReference<>() {
    };
    @LocalServerPort
    private int port;
    @Autowired
    private TestRestTemplate restTemplate;
    // OIDC discovery document must be public and advertise the custom
    // endpoint URLs configured under the https://provider.com issuer.
    @Test
    void openidConfigurationShouldAllowAccess() {
        ResponseEntity<Map<String, Object>> entity = this.restTemplate.exchange("/.well-known/openid-configuration",
                HttpMethod.GET, null, MAP_TYPE_REFERENCE);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
        OidcProviderConfiguration config = OidcProviderConfiguration.withClaims(entity.getBody()).build();
        assertThat(config.getIssuer()).hasToString("https://provider.com");
        assertThat(config.getAuthorizationEndpoint()).hasToString("https://provider.com/authorize");
        assertThat(config.getTokenEndpoint()).hasToString("https://provider.com/token");
        assertThat(config.getJwkSetUrl()).hasToString("https://provider.com/jwks");
        assertThat(config.getTokenRevocationEndpoint()).hasToString("https://provider.com/revoke");
        assertThat(config.getEndSessionEndpoint()).hasToString("https://provider.com/logout");
        assertThat(config.getTokenIntrospectionEndpoint()).hasToString("https://provider.com/introspect");
        assertThat(config.getUserInfoEndpoint()).hasToString("https://provider.com/user");
        // PAR endpoint and OIDC Client Registration are disabled by default
        assertThat(config.getClientRegistrationEndpoint()).isNull();
        assertThat(config.getPushedAuthorizationRequestEndpoint()).isNull();
    }
    // Same expectations for the OAuth2 (non-OIDC) metadata document.
    @Test
    void authServerMetadataShouldAllowAccess() {
        ResponseEntity<Map<String, Object>> entity = this.restTemplate
            .exchange("/.well-known/oauth-authorization-server", HttpMethod.GET, null, MAP_TYPE_REFERENCE);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
        OAuth2AuthorizationServerMetadata config = OAuth2AuthorizationServerMetadata.withClaims(entity.getBody())
            .build();
        assertThat(config.getIssuer()).hasToString("https://provider.com");
        assertThat(config.getAuthorizationEndpoint()).hasToString("https://provider.com/authorize");
        assertThat(config.getTokenEndpoint()).hasToString("https://provider.com/token");
        assertThat(config.getJwkSetUrl()).hasToString("https://provider.com/jwks");
        assertThat(config.getTokenRevocationEndpoint()).hasToString("https://provider.com/revoke");
        assertThat(config.getTokenIntrospectionEndpoint()).hasToString("https://provider.com/introspect");
        // PAR endpoint and OIDC Client Registration are disabled by default
        assertThat(config.getClientRegistrationEndpoint()).isNull();
        assertThat(config.getPushedAuthorizationRequestEndpoint()).isNull();
    }
    // An unauthenticated browser request to the root is redirected to /login.
    @Test
    void anonymousShouldRedirectToLogin() {
        ResponseEntity<String> entity = this.restTemplate.withRedirects(HttpRedirects.DONT_FOLLOW)
            .getForEntity("/", String.class);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FOUND);
        assertThat(entity.getHeaders().getLocation()).isEqualTo(URI.create("http://localhost:" + this.port + "/login"));
    }
    // client_credentials grant with valid basic auth yields a bearer token
    // response carrying the requested scopes.
    @Test
    void validTokenRequestShouldReturnTokenResponse() {
        HttpHeaders headers = new HttpHeaders();
        headers.setBasicAuth("messaging-client", "secret");
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add(OAuth2ParameterNames.CLIENT_ID, "messaging-client");
        body.add(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue());
        body.add(OAuth2ParameterNames.SCOPE, "message.read message.write");
        HttpEntity<Object> request = new HttpEntity<>(body, headers);
        ResponseEntity<Map<String, Object>> entity = this.restTemplate.exchange("/token", HttpMethod.POST, request,
                MAP_TYPE_REFERENCE);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
        Map<String, Object> tokenResponse = Objects.requireNonNull(entity.getBody());
        assertThat(tokenResponse.get(OAuth2ParameterNames.ACCESS_TOKEN)).isNotNull();
        assertThat(tokenResponse.get(OAuth2ParameterNames.EXPIRES_IN)).isNotNull();
        assertThat(tokenResponse.get(OAuth2ParameterNames.SCOPE)).isEqualTo("message.read message.write");
        assertThat(tokenResponse.get(OAuth2ParameterNames.TOKEN_TYPE))
            .isEqualTo(OAuth2AccessToken.TokenType.BEARER.getValue());
    }
    // The same token request without client credentials is rejected with 401.
    @Test
    void anonymousTokenRequestShouldReturnUnauthorized() {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add(OAuth2ParameterNames.CLIENT_ID, "messaging-client");
        body.add(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue());
        body.add(OAuth2ParameterNames.SCOPE, "message.read message.write");
        HttpEntity<Object> request = new HttpEntity<>(body, headers);
        ResponseEntity<Map<String, Object>> entity = this.restTemplate.exchange("/token", HttpMethod.POST, request,
                MAP_TYPE_REFERENCE);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
    }
    // Accept: */* is treated as an API client -> still 401, no login redirect.
    @Test
    void anonymousTokenRequestWithAcceptHeaderAllShouldReturnUnauthorized() {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        headers.setAccept(List.of(MediaType.ALL));
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add(OAuth2ParameterNames.CLIENT_ID, "messaging-client");
        body.add(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue());
        body.add(OAuth2ParameterNames.SCOPE, "message.read message.write");
        HttpEntity<Object> request = new HttpEntity<>(body, headers);
        ResponseEntity<Map<String, Object>> entity = this.restTemplate.exchange("/token", HttpMethod.POST, request,
                MAP_TYPE_REFERENCE);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
    }
    // Accept: text/html is treated as a browser -> redirect to /login instead
    // of a bare 401.
    @Test
    void anonymousTokenRequestWithAcceptHeaderTextHtmlShouldRedirectToLogin() {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        headers.setAccept(List.of(MediaType.TEXT_HTML));
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add(OAuth2ParameterNames.CLIENT_ID, "messaging-client");
        body.add(OAuth2ParameterNames.GRANT_TYPE, AuthorizationGrantType.CLIENT_CREDENTIALS.getValue());
        body.add(OAuth2ParameterNames.SCOPE, "message.read message.write");
        HttpEntity<Object> request = new HttpEntity<>(body, headers);
        ResponseEntity<Map<String, Object>> entity = this.restTemplate.withRedirects(HttpRedirects.DONT_FOLLOW)
            .exchange("/token", HttpMethod.POST, request, MAP_TYPE_REFERENCE);
        assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FOUND);
        assertThat(entity.getHeaders().getLocation()).isEqualTo(URI.create("http://localhost:" + this.port + "/login"));
    }
}
|
SampleOAuth2AuthorizationServerApplicationTests
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/bcextensions/ObserverInfoImpl.java
|
{
"start": 594,
"end": 3303
}
|
/**
 * Build-compatible-extensions adapter that exposes an ArC observer as a CDI
 * {@code ObserverInfo}. Every accessor delegates to the wrapped ArC model;
 * methods that only make sense for method-based observers return {@code null}
 * when the observer is synthetic.
 */
class ____ implements ObserverInfo {
    private final org.jboss.jandex.IndexView index;
    private final org.jboss.jandex.MutableAnnotationOverlay overlay;
    private final io.quarkus.arc.processor.ObserverInfo observer;

    ObserverInfoImpl(org.jboss.jandex.IndexView index, org.jboss.jandex.MutableAnnotationOverlay overlay,
            io.quarkus.arc.processor.ObserverInfo observer) {
        this.index = index;
        this.overlay = overlay;
        this.observer = observer;
    }

    @Override
    public Type eventType() {
        return TypeImpl.fromJandexType(index, overlay, observer.getObservedType());
    }

    @Override
    public Collection<AnnotationInfo> qualifiers() {
        // wrap each ArC qualifier in the extension-facing AnnotationInfo adapter
        return observer.getQualifiers().stream()
                .map(qualifier -> (AnnotationInfo) new AnnotationInfoImpl(index, overlay, qualifier))
                .toList();
    }

    @Override
    public ClassInfo declaringClass() {
        return new ClassInfoImpl(index, overlay, index.getClassByName(observer.getBeanClass()));
    }

    @Override
    public MethodInfo observerMethod() {
        // synthetic observers have no backing method
        return observer.isSynthetic()
                ? null
                : new MethodInfoImpl(index, overlay, observer.getObserverMethod());
    }

    @Override
    public ParameterInfo eventParameter() {
        // synthetic observers have no event parameter either
        return observer.isSynthetic()
                ? null
                : new ParameterInfoImpl(index, overlay, observer.getEventParameter());
    }

    @Override
    public BeanInfo bean() {
        // synthetic observers are not declared by a bean
        return observer.isSynthetic()
                ? null
                : BeanInfoImpl.create(index, overlay, observer.getDeclaringBean());
    }

    @Override
    public boolean isSynthetic() {
        return observer.isSynthetic();
    }

    @Override
    public int priority() {
        return observer.getPriority();
    }

    @Override
    public boolean isAsync() {
        return observer.isAsync();
    }

    @Override
    public Reception reception() {
        return observer.getReception();
    }

    @Override
    public TransactionPhase transactionPhase() {
        return observer.getTransactionPhase();
    }

    @Override
    public String toString() {
        return observer.toString();
    }
}
|
ObserverInfoImpl
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nestedtargetproperties/ChartEntryToArtist.java
|
{
"start": 654,
"end": 2476
}
|
// NOTE(review): class name redacted; metadata identifies it as
// ChartEntryToArtist, a MapStruct mapper exercising deeply nested target
// properties (song.artist.label.studio...) and inverse configuration.
class ____ {
    public static final ChartEntryToArtist MAPPER = Mappers.getMapper( ChartEntryToArtist.class );
    // Flat ChartEntry -> nested Chart graph; "type" has no source and is ignored.
    @Mappings({
        @Mapping(target = "type", ignore = true),
        @Mapping(target = "name", source = "chartName"),
        @Mapping(target = "song.title", source = "songTitle" ),
        @Mapping(target = "song.artist.name", source = "artistName" ),
        @Mapping(target = "song.artist.label.studio.name", source = "recordedAt"),
        @Mapping(target = "song.artist.label.studio.city", source = "city" ),
        @Mapping(target = "song.positions", source = "position" )
    })
    public abstract Chart map(ChartEntry chartEntry);
    // Two-source variant: name/positions come from the second entry, the rest
    // from the first, to exercise source-parameter qualification.
    @Mappings({
        @Mapping(target = "type", ignore = true),
        @Mapping(target = "name", source = "chartEntry2.chartName"),
        @Mapping(target = "song.title", source = "chartEntry1.songTitle" ),
        @Mapping(target = "song.artist.name", source = "chartEntry1.artistName" ),
        @Mapping(target = "song.artist.label.studio.name", source = "chartEntry1.recordedAt"),
        @Mapping(target = "song.artist.label.studio.city", source = "chartEntry1.city" ),
        @Mapping(target = "song.positions", source = "chartEntry2.position" )
    })
    public abstract Chart map(ChartEntry chartEntry1, ChartEntry chartEntry2);
    // Reverse mapping derived automatically from the single-source map() above.
    @InheritInverseConfiguration
    public abstract ChartEntry map(Chart chart);
    // Helper: a single (possibly null) position becomes a mutable list;
    // null maps to an empty list rather than null.
    protected List<Integer> mapPosition(Integer in) {
        if ( in != null ) {
            return new ArrayList<>( Arrays.asList( in ) );
        }
        else {
            return new ArrayList<>();
        }
    }
    // Helper for the inverse direction: first list element, or null when the
    // list is null or empty.
    protected Integer mapPosition(List<Integer> in) {
        if ( in != null && !in.isEmpty() ) {
            return in.get( 0 );
        }
        else {
            return null;
        }
    }
}
|
ChartEntryToArtist
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ScalarSubquery.java
|
{
"start": 626,
"end": 1369
}
|
// NOTE(review): class name redacted; metadata identifies it as ScalarSubquery,
// a sub-query expression expected to yield a single scalar value.
class ____ extends SubQueryExpression {
    public ScalarSubquery(Source source, LogicalPlan query) {
        // delegate with no pre-assigned NameId; one is generated upstream
        this(source, query, null);
    }
    public ScalarSubquery(Source source, LogicalPlan query, NameId id) {
        super(source, query, id);
    }
    @Override
    protected NodeInfo<ScalarSubquery> info() {
        return NodeInfo.create(this, ScalarSubquery::new, query(), id());
    }
    @Override
    protected ScalarSubquery clone(LogicalPlan newQuery) {
        return new ScalarSubquery(source(), newQuery);
    }
    @Override
    public DataType dataType() {
        // the scalar type cannot be determined at this node; resolution is
        // expected to happen elsewhere, so this is deliberately unsupported
        throw new UnsupportedOperationException();
    }
    @Override
    public Nullability nullable() {
        // an empty sub-query result surfaces as null, so always nullable
        return Nullability.TRUE;
    }
}
|
ScalarSubquery
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/OAuth2AuthorizedClientManager.java
|
{
"start": 924,
"end": 1634
}
|
interface ____ responsible for the overall management of
* {@link OAuth2AuthorizedClient Authorized Client(s)}.
*
* <p>
* The primary responsibilities include:
* <ol>
* <li>Authorizing (or re-authorizing) an OAuth 2.0 Client by leveraging an
* {@link OAuth2AuthorizedClientProvider}(s).</li>
* <li>Delegating the persistence of an {@link OAuth2AuthorizedClient}, typically using an
* {@link OAuth2AuthorizedClientService} OR {@link OAuth2AuthorizedClientRepository}.</li>
* </ol>
*
* @author Joe Grandja
* @since 5.2
* @see OAuth2AuthorizedClient
* @see OAuth2AuthorizedClientProvider
* @see OAuth2AuthorizedClientService
* @see OAuth2AuthorizedClientRepository
*/
@FunctionalInterface
public
|
are
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TableFactoryHarness.java
|
{
"start": 12895,
"end": 14000
}
|
// NOTE(review): class name redacted; metadata identifies it as ScanSourceBase,
// a test-harness insert-only scan source that produces no data.
class ____ extends SourceBase implements ScanTableSource {
    // whether the source reports itself as bounded (batch) or unbounded
    private final boolean bounded;
    public ScanSourceBase() {
        this(true);
    }
    public ScanSourceBase(boolean bounded) {
        this.bounded = bounded;
    }
    @Override
    public ChangelogMode getChangelogMode() {
        return ChangelogMode.insertOnly();
    }
    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        // a generator configured for 0 elements: the function body is
        // unreachable and only guards against accidental invocation
        return SourceProvider.of(
                new DataGeneratorSource<>(
                        (GeneratorFunction<Long, RowData>)
                                value -> {
                                    throw new UnsupportedOperationException(
                                            "TableFactoryHarness no-op source should not generate data");
                                },
                        0L, // Generate 0 elements (no-op)
                        TypeInformation.of(RowData.class)));
    }
}
/**
* Base
|
ScanSourceBase
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/eventbus/EventBusTest.java
|
{
"start": 11568,
"end": 11618
}
|
// Minimal single-argument callback used by the enclosing EventBus tests.
// NOTE(review): name redacted; metadata identifies it as Callback.
interface ____<T> {
    void call(T t);
}
}
|
Callback
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/TestQueuePlacementConverter.java
|
{
"start": 3456,
"end": 18524
}
|
class ____ {
private static final String DEFAULT_QUEUE = "root.default";
@Mock
private PlacementManager placementManager;
@Mock
private FSConfigToCSConfigRuleHandler ruleHandler;
private QueuePlacementConverter converter;
private CapacitySchedulerConfiguration csConf;
@BeforeEach
public void setup() {
this.converter = new QueuePlacementConverter();
this.csConf = new CapacitySchedulerConfiguration(
new Configuration(false));
}
@Test
public void testConvertUserRule() {
PlacementRule fsRule = mock(UserPlacementRule.class);
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.USER);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertSpecifiedRule() {
PlacementRule fsRule = mock(SpecifiedPlacementRule.class);
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.SPECIFIED);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertPrimaryGroupRule() {
PlacementRule fsRule = mock(PrimaryGroupPlacementRule.class);
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.PRIMARY_GROUP);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertSecondaryGroupRule() {
PlacementRule fsRule = mock(SecondaryGroupExistingPlacementRule.class);
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.SECONDARY_GROUP);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertDefaultRuleWithQueueName() {
DefaultPlacementRule fsRule = mock(DefaultPlacementRule.class);
fsRule.defaultQueueName = "abc";
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.CUSTOM);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertDefaultRule() {
DefaultPlacementRule fsRule = mock(DefaultPlacementRule.class);
fsRule.defaultQueueName = DEFAULT_QUEUE;
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.DEFAULT_QUEUE);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertUnsupportedRule() {
assertThrows(IllegalArgumentException.class, ()->{
PlacementRule rule = mock(TestPlacementRule.class);
initPlacementManagerMock(rule);
// throws exception
convert();
});
}
@Test
public void testConvertRejectRule() {
PlacementRule rule = mock(RejectPlacementRule.class);
initPlacementManagerMock(rule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.REJECT);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertNestedPrimaryGroupRule() {
UserPlacementRule rule = mock(UserPlacementRule.class);
PrimaryGroupPlacementRule parent = mock(PrimaryGroupPlacementRule.class);
when(rule.getParentRule()).thenReturn(parent);
initPlacementManagerMock(rule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.PRIMARY_GROUP_USER);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertNestedSecondaryGroupRule() {
UserPlacementRule rule = mock(UserPlacementRule.class);
SecondaryGroupExistingPlacementRule parent =
mock(SecondaryGroupExistingPlacementRule.class);
when(rule.getParentRule()).thenReturn(parent);
initPlacementManagerMock(rule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.SECONDARY_GROUP_USER);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertNestedDefaultRule() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
DefaultPlacementRule parent =
mock(DefaultPlacementRule.class);
parent.defaultQueueName = "root.abc";
when(fsRule.getParentRule()).thenReturn(parent);
initPlacementManagerMock(fsRule);
MappingRulesDescription description = convert();
assertEquals(1, description.getRules().size(), "Number of rules");
Rule rule = description.getRules().get(0);
verifyRule(description.getRules().get(0), Policy.USER);
assertEquals("root.abc", rule.getParentQueue(), "Parent path");
verifyZeroInteractions(ruleHandler);
}
@Test
public void testUnsupportedNestedParentRule() {
assertThrows(IllegalArgumentException.class, ()->{
UserPlacementRule fsRule = mock(UserPlacementRule.class);
TestPlacementRule parent =
mock(TestPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
initPlacementManagerMock(fsRule);
// throws exception
convert();
});
}
@Test
public void testConvertMultiplePlacementRules() {
UserPlacementRule rule1 = mock(UserPlacementRule.class);
PrimaryGroupPlacementRule rule2 =
mock(PrimaryGroupPlacementRule.class);
SecondaryGroupExistingPlacementRule rule3 =
mock(SecondaryGroupExistingPlacementRule.class);
initPlacementManagerMock(rule1, rule2, rule3);
MappingRulesDescription description = convert();
assertEquals(3, description.getRules().size(), "Number of rules");
verifyRule(description.getRules().get(0), Policy.USER);
verifyRule(description.getRules().get(1), Policy.PRIMARY_GROUP);
verifyRule(description.getRules().get(2), Policy.SECONDARY_GROUP);
verifyZeroInteractions(ruleHandler);
}
@Test
public void testConvertPrimaryGroupRuleWithCreate() {
FSPlacementRule fsRule = mock(PrimaryGroupPlacementRule.class);
when(fsRule.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleRuleAutoCreateFlag(eq("root.<primaryGroup>"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertSecondaryGroupRuleWithCreate() {
FSPlacementRule fsRule = mock(SecondaryGroupExistingPlacementRule.class);
when(fsRule.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleRuleAutoCreateFlag(eq("root.<secondaryGroup>"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedPrimaryGroupRuleWithCreate() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
PrimaryGroupPlacementRule parent = mock(PrimaryGroupPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
when(fsRule.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleRuleAutoCreateFlag(eq("root.<primaryGroup>"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedSecondaryGroupRuleWithCreate() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
SecondaryGroupExistingPlacementRule parent =
mock(SecondaryGroupExistingPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
when(fsRule.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleRuleAutoCreateFlag(eq("root.<secondaryGroup>"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedDefaultGroupWithCreate() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
DefaultPlacementRule parent =
mock(DefaultPlacementRule.class);
parent.defaultQueueName = "root.abc";
when(fsRule.getParentRule()).thenReturn(parent);
when(fsRule.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleRuleAutoCreateFlag(eq("root.abc"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedRuleCreateFalseFalseInWeightMode() {
testConvertNestedRuleCreateFlagInWeightMode(false, false,
false, false);
}
@Test
public void testConvertNestedRuleCreateFalseTrueInWeightMode() {
testConvertNestedRuleCreateFlagInWeightMode(false, true,
true, true);
}
@Test
public void testConvertNestedRuleCreateTrueFalseInWeightMode() {
testConvertNestedRuleCreateFlagInWeightMode(true, false,
true, true);
}
@Test
public void testConvertNestedRuleCreateTrueTrueInWeightMode() {
testConvertNestedRuleCreateFlagInWeightMode(true, true,
true, false);
}
private void testConvertNestedRuleCreateFlagInWeightMode(
boolean parentCreate,
boolean childCreate,
boolean expectedFlagOnRule,
boolean ruleHandlerShouldBeInvoked) {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
PrimaryGroupPlacementRule parent = mock(PrimaryGroupPlacementRule.class);
when(parent.getCreateFlag()).thenReturn(parentCreate);
when(fsRule.getParentRule()).thenReturn(parent);
when(fsRule.getCreateFlag()).thenReturn(childCreate);
initPlacementManagerMock(fsRule);
MappingRulesDescription desc = convertInWeightMode();
Rule rule = desc.getRules().get(0);
assertEquals(expectedFlagOnRule, rule.getCreate(), "Expected create flag");
if (ruleHandlerShouldBeInvoked) {
verify(ruleHandler).handleFSParentAndChildCreateFlagDiff(
any(Policy.class));
verifyNoMoreInteractions(ruleHandler);
} else {
verifyZeroInteractions(ruleHandler);
}
}
@Test
public void testParentSetToRootInWeightModeUserPolicy() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
testParentSetToRootInWeightMode(fsRule);
}
@Test
public void testParentSetToRootInWeightModePrimaryGroupPolicy() {
PrimaryGroupPlacementRule fsRule = mock(PrimaryGroupPlacementRule.class);
testParentSetToRootInWeightMode(fsRule);
}
@Test
public void testParentSetToRootInWeightModePrimaryGroupUserPolicy() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
PrimaryGroupPlacementRule parent = mock(PrimaryGroupPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
testParentSetToRootInWeightMode(fsRule);
}
@Test
public void testParentSetToRootInWeightModeSecondaryGroupPolicy() {
SecondaryGroupExistingPlacementRule fsRule =
mock(SecondaryGroupExistingPlacementRule.class);
testParentSetToRootInWeightMode(fsRule);
}
@Test
public void testParentSetToRootInWeightModeSecondaryGroupUserPolicy() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
SecondaryGroupExistingPlacementRule parent =
mock(SecondaryGroupExistingPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
testParentSetToRootInWeightMode(fsRule);
}
private void testParentSetToRootInWeightMode(FSPlacementRule fsRule) {
initPlacementManagerMock(fsRule);
MappingRulesDescription desc = convertInWeightMode();
Rule rule = desc.getRules().get(0);
assertEquals("root", rule.getParentQueue(), "Parent queue");
}
@Test
public void testConvertNestedPrimaryGroupRuleWithParentCreate() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
PrimaryGroupPlacementRule parent = mock(PrimaryGroupPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
when(parent.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleFSParentCreateFlag(eq("root.<primaryGroup>"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedSecondaryGroupRuleWithParentCreate() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
SecondaryGroupExistingPlacementRule parent =
mock(SecondaryGroupExistingPlacementRule.class);
when(fsRule.getParentRule()).thenReturn(parent);
when(parent.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleFSParentCreateFlag(eq("root.<secondaryGroup>"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedDefaultGroupWithParentCreate() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
DefaultPlacementRule parent =
mock(DefaultPlacementRule.class);
parent.defaultQueueName = "root.abc";
when(fsRule.getParentRule()).thenReturn(parent);
when(parent.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
convert();
verify(ruleHandler).handleFSParentCreateFlag(eq("root.abc"));
verifyNoMoreInteractions(ruleHandler);
}
@Test
public void testConvertNestedDefaultWithConflictingQueues() {
UserPlacementRule fsRule = mock(UserPlacementRule.class);
DefaultPlacementRule parent =
mock(DefaultPlacementRule.class);
parent.defaultQueueName = "root.users";
when(fsRule.getParentRule()).thenReturn(parent);
when(fsRule.getCreateFlag()).thenReturn(true);
initPlacementManagerMock(fsRule);
csConf.setQueues(new QueuePath("root.users"), new String[] {"hadoop"});
convert();
verify(ruleHandler).handleRuleAutoCreateFlag(eq("root.users"));
verify(ruleHandler).handleChildStaticDynamicConflict(eq("root.users"));
verifyNoMoreInteractions(ruleHandler);
}
private void initPlacementManagerMock(
PlacementRule... rules) {
List<PlacementRule> listOfRules = Lists.newArrayList(rules);
when(placementManager.getPlacementRules()).thenReturn(listOfRules);
}
private MappingRulesDescription convert() {
return converter.convertPlacementPolicy(placementManager,
ruleHandler, csConf, true);
}
private MappingRulesDescription convertInWeightMode() {
return converter.convertPlacementPolicy(placementManager,
ruleHandler, csConf, false);
}
private void verifyRule(Rule rule, Policy expectedPolicy) {
assertEquals(expectedPolicy, rule.getPolicy(), "Policy type");
assertEquals("*", rule.getMatches(), "Match string");
assertEquals(FallbackResult.SKIP, rule.getFallbackResult(), "Fallback result");
assertEquals(Type.USER, rule.getType(), "Type");
}
private
|
TestQueuePlacementConverter
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/oceanbase/OceanbaseAlterTableAddPartitionTest2.java
|
{
"start": 967,
"end": 2490
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "alter table clients add partition partitions 18;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
SQLStatement stmt = stmtList.get(0);
{
String result = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE clients"
+ "\n\tADD PARTITION PARTITIONS 18;",
result);
}
{
String result = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("alter table clients"
+ "\n\tadd partition partitions 18;",
result);
}
assertEquals(1, stmtList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("t_basic_store")));
}
}
|
OceanbaseAlterTableAddPartitionTest2
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/value/exception/CustomUnexpectedValueMappingExceptionMapper.java
|
{
"start": 600,
"end": 1572
}
|
interface ____ {
@EnumMapping(unexpectedValueMappingException = CustomIllegalArgumentException.class)
@ValueMapping( source = MappingConstants.ANY_UNMAPPED, target = "DEFAULT" )
ExternalOrderType withAnyUnmapped(OrderType orderType);
@EnumMapping(unexpectedValueMappingException = CustomIllegalArgumentException.class)
@ValueMapping( source = MappingConstants.ANY_REMAINING, target = "DEFAULT" )
ExternalOrderType withAnyRemaining(OrderType orderType);
@EnumMapping(unexpectedValueMappingException = CustomIllegalArgumentException.class)
@ValueMapping(source = "EXTRA", target = "SPECIAL")
@ValueMapping(source = "STANDARD", target = "DEFAULT")
@ValueMapping(source = "NORMAL", target = "DEFAULT")
ExternalOrderType onlyWithMappings(OrderType orderType);
@InheritInverseConfiguration(name = "onlyWithMappings")
OrderType inverseOnlyWithMappings(ExternalOrderType orderType);
}
|
CustomUnexpectedValueMappingExceptionMapper
|
java
|
square__retrofit
|
retrofit/src/main/java/retrofit2/DefaultCallAdapterFactory.java
|
{
"start": 2115,
"end": 4151
}
|
class ____<T> implements Call<T> {
final Executor callbackExecutor;
final Call<T> delegate;
ExecutorCallbackCall(Executor callbackExecutor, Call<T> delegate) {
this.callbackExecutor = callbackExecutor;
this.delegate = delegate;
}
@Override
public void enqueue(final Callback<T> callback) {
Objects.requireNonNull(callback, "callback == null");
delegate.enqueue(
new Callback<T>() {
@Override
public void onResponse(Call<T> call, final Response<T> response) {
callbackExecutor.execute(
() -> {
if (delegate.isCanceled()) {
// Emulate OkHttp's behavior of throwing/delivering an IOException on
// cancellation.
callback.onFailure(ExecutorCallbackCall.this, new IOException("Canceled"));
} else {
callback.onResponse(ExecutorCallbackCall.this, response);
}
});
}
@Override
public void onFailure(Call<T> call, final Throwable t) {
callbackExecutor.execute(() -> callback.onFailure(ExecutorCallbackCall.this, t));
}
});
}
@Override
public boolean isExecuted() {
return delegate.isExecuted();
}
@Override
public Response<T> execute() throws IOException {
return delegate.execute();
}
@Override
public void cancel() {
delegate.cancel();
}
@Override
public boolean isCanceled() {
return delegate.isCanceled();
}
@SuppressWarnings("CloneDoesntCallSuperClone") // Performing deep clone.
@Override
public Call<T> clone() {
return new ExecutorCallbackCall<>(callbackExecutor, delegate.clone());
}
@Override
public Request request() {
return delegate.request();
}
@Override
public Timeout timeout() {
return delegate.timeout();
}
}
}
|
ExecutorCallbackCall
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/FsDatasetTestUtils.java
|
{
"start": 2962,
"end": 9191
}
|
interface ____ {
/**
* Corrupt the block file of the replica.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException if I/O error.
*/
void corruptData() throws IOException;
/**
* Corrupt the block file with the given content.
* @param newContent the new content written to the block file.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException if I/O error.
*/
void corruptData(byte[] newContent) throws IOException;
/**
* Truncate the block file of the replica to the newSize.
* @param newSize the new size of the block file.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException if I/O error.
*/
void truncateData(long newSize) throws IOException;
/**
* Delete the block file of the replica.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException if I/O error.
*/
void deleteData() throws IOException;
/**
* Corrupt the metadata file of the replica.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException if I/O error.
*/
void corruptMeta() throws IOException;
/**
* Delete the metadata file of the replcia.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException I/O error.
*/
void deleteMeta() throws IOException;
/**
* Truncate the metadata file of the replica to the newSize.
* @throws FileNotFoundException if the block file does not exist.
* @throws IOException I/O error.
*/
void truncateMeta(long newSize) throws IOException;
/**
* Make the replica unreachable, perhaps by renaming it to an
* invalid file name.
* @throws IOException On I/O error.
*/
void makeUnreachable() throws IOException;
}
/**
* Get a materialized replica to corrupt its block / crc later.
* @param block the block of this replica begone to.
* @return a replica to corrupt. Return null if the replica does not exist
* in this dataset.
* @throws ReplicaNotFoundException if the replica does not exists on the
* dataset.
*/
MaterializedReplica getMaterializedReplica(ExtendedBlock block)
throws ReplicaNotFoundException;
/**
* Create a finalized replica and add it into the FsDataset.
*/
Replica createFinalizedReplica(ExtendedBlock block) throws IOException;
/**
* Create a finalized replica on a particular volume, and add it into
* the FsDataset.
*/
Replica createFinalizedReplica(FsVolumeSpi volume, ExtendedBlock block)
throws IOException;
/**
* Create a {@link ReplicaInPipeline} and add it into the FsDataset.
*/
Replica createReplicaInPipeline(ExtendedBlock block) throws IOException;
/**
* Create a {@link ReplicaInPipeline} and add it into the FsDataset.
*/
Replica createReplicaInPipeline(FsVolumeSpi volume, ExtendedBlock block)
throws IOException;
/**
* Create a {@link ReplicaBeingWritten} and add it into the FsDataset.
*/
Replica createRBW(ExtendedBlock block) throws IOException;
/**
* Create a {@link ReplicaBeingWritten} on the particular volume, and add it
* into the FsDataset.
*/
Replica createRBW(FsVolumeSpi volume, ExtendedBlock block) throws IOException;
/**
* Create a {@link ReplicaWaitingToBeRecovered} object and add it into the
* FsDataset.
*/
Replica createReplicaWaitingToBeRecovered(ExtendedBlock block)
throws IOException;
/**
* Create a {@link ReplicaWaitingToBeRecovered} on the particular volume,
* and add it into the FsDataset.
*/
Replica createReplicaWaitingToBeRecovered(
FsVolumeSpi volume, ExtendedBlock block) throws IOException;
/**
* Create a {@link ReplicaUnderRecovery} object and add it into the FsDataset.
*/
Replica createReplicaUnderRecovery(ExtendedBlock block, long recoveryId)
throws IOException;
/**
* Check the stored files / data of a replica.
* @param replica a replica object.
* @throws IOException
*/
void checkStoredReplica(final Replica replica) throws IOException;
/**
* Create dummy replicas for block data and metadata.
* @param block the block of which replica to be created.
* @throws IOException on I/O error.
*/
void injectCorruptReplica(ExtendedBlock block) throws IOException;
/**
* Get the replica of a block. Returns null if it does not exist.
* @param block the block whose replica will be returned.
* @return Replica for the block.
*/
Replica fetchReplica(ExtendedBlock block);
/**
* @return The default value of number of data dirs per DataNode in
* MiniDFSCluster.
*/
int getDefaultNumOfDataDirs();
/**
* Obtain the raw capacity of underlying storage per DataNode.
*/
long getRawCapacity() throws IOException;
/**
* Get the persistently stored length of the block.
*/
long getStoredDataLength(ExtendedBlock block) throws IOException;
/**
* Get the persistently stored generation stamp.
*/
long getStoredGenerationStamp(ExtendedBlock block) throws IOException;
/**
* Change the persistently stored generation stamp.
* @param block the block whose generation stamp will be changed
* @param newGenStamp the new generation stamp
* @throws IOException
*/
void changeStoredGenerationStamp(ExtendedBlock block, long newGenStamp)
throws IOException;
/** Get all stored replicas in the specified block pool. */
Iterator<Replica> getStoredReplicas(String bpid) throws IOException;
/**
* Get the number of pending async deletions.
*/
long getPendingAsyncDeletions();
/**
* Verify the existence of the block pool.
*
* @param bpid block pool ID
* @throws IOException if the block pool does not exist.
*/
void verifyBlockPoolExists(String bpid) throws IOException;
/**
* Verify that the block pool does not exist.
*
* @param bpid block pool ID
* @throws IOException if the block pool does exist.
*/
void verifyBlockPoolMissing(String bpid) throws IOException;
}
|
MaterializedReplica
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotRequestId.java
|
{
"start": 1628,
"end": 1988
}
|
class ____ extends AbstractID {
private static final long serialVersionUID = -6072105912250154283L;
public SlotRequestId(long lowerPart, long upperPart) {
super(lowerPart, upperPart);
}
public SlotRequestId() {}
@Override
public String toString() {
return "SlotRequestId{" + super.toString() + '}';
}
}
|
SlotRequestId
|
java
|
apache__dubbo
|
dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/mesh/rule/destination/TCPSettings.java
|
{
"start": 877,
"end": 1002
}
|
class ____ {
private int maxConnections;
private int connectTimeout;
private TcpKeepalive tcpKeepalive;
}
|
TCPSettings
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
|
{
"start": 5074,
"end": 19199
}
|
class ____ extends AbstractChunkedSerializingTestCase<EsqlQueryResponse> {
private BlockFactory blockFactory;
@Before
public void newBlockFactory() {
BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking();
blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays);
}
@After
public void blockFactoryEmpty() {
assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(List.of(AbstractPageMappingOperator.Status.ENTRY));
}
@Override
protected EsqlQueryResponse createXContextTestInstance(XContentType xContentType) {
// columnar param can't be different from the default value (false) since the EsqlQueryResponse will be serialized (by some random
// XContentType, not to a StreamOutput) and parsed back, which doesn't preserve columnar field's value.
return randomResponse(false, null);
}
@Override
protected EsqlQueryResponse createTestInstance() {
return randomResponse(randomBoolean(), randomProfile());
}
EsqlQueryResponse randomResponse(boolean columnar, EsqlQueryResponse.Profile profile) {
return randomResponseAsync(columnar, profile, false);
}
EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profile profile, boolean async) {
int noCols = randomIntBetween(1, 10);
List<ColumnInfoImpl> columns = randomList(noCols, noCols, this::randomColumnInfo);
int noPages = randomIntBetween(1, 20);
List<Page> values = randomList(noPages, noPages, () -> randomPage(columns));
String id = null;
boolean isRunning = false;
long startTimeMillis = 0L;
long expirationTimeMillis = 0L;
if (async) {
id = randomAlphaOfLengthBetween(1, 16);
isRunning = randomBoolean();
}
return new EsqlQueryResponse(
columns,
values,
randomNonNegativeLong(),
randomNonNegativeLong(),
profile,
columnar,
id,
isRunning,
async,
startTimeMillis,
expirationTimeMillis,
createExecutionInfo()
);
}
EsqlExecutionInfo createExecutionInfo() {
EsqlExecutionInfo executionInfo = new EsqlExecutionInfo(true);
executionInfo.overallTook(new TimeValue(5000));
executionInfo.swapCluster(
"",
(k, v) -> new EsqlExecutionInfo.Cluster(
"",
"logs-1",
false,
EsqlExecutionInfo.Cluster.Status.SUCCESSFUL,
10,
10,
3,
0,
null,
new TimeValue(4444L)
)
);
executionInfo.swapCluster(
"remote1",
(k, v) -> new EsqlExecutionInfo.Cluster(
"remote1",
"remote1:logs-1",
true,
EsqlExecutionInfo.Cluster.Status.SUCCESSFUL,
12,
12,
5,
0,
null,
new TimeValue(4999L)
)
);
return executionInfo;
}
private ColumnInfoImpl randomColumnInfo() {
DataType type = randomValueOtherThanMany(
t -> false == DataType.isPrimitiveAndSupported(t)
|| t == DataType.DATE_PERIOD
|| t == DataType.TIME_DURATION
|| t == DataType.PARTIAL_AGG
|| t == DataType.AGGREGATE_METRIC_DOUBLE
|| t == DataType.TSID_DATA_TYPE,
() -> randomFrom(DataType.types())
).widenSmallNumeric();
return new ColumnInfoImpl(randomAlphaOfLength(10), type.esType(), randomOriginalTypes());
}
@Nullable
public static List<String> randomOriginalTypes() {
return randomBoolean() ? null : UnsupportedEsFieldTests.randomOriginalTypes();
}
private EsqlQueryResponse.Profile randomProfile() {
if (randomBoolean()) {
return null;
}
return new EsqlQueryResponseProfileTests().createTestInstance();
}
private Page randomPage(List<ColumnInfoImpl> columns) {
return new Page(columns.stream().map(c -> {
Block.Builder builder = PlannerUtils.toElementType(c.type()).newBlockBuilder(1, blockFactory);
switch (c.type()) {
case UNSIGNED_LONG, LONG, COUNTER_LONG -> ((LongBlock.Builder) builder).appendLong(randomLong());
case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(randomInt());
case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(randomDouble());
case KEYWORD -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10)));
case TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000)));
case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef(
new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean())))
);
case DATETIME -> ((LongBlock.Builder) builder).appendLong(randomInstant().toEpochMilli());
case BOOLEAN -> ((BooleanBlock.Builder) builder).appendBoolean(randomBoolean());
case UNSUPPORTED -> ((BytesRefBlock.Builder) builder).appendNull();
// TODO - add a random instant thing here?
case DATE_NANOS -> ((LongBlock.Builder) builder).appendLong(randomNonNegativeLong());
case VERSION -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef());
case GEO_POINT -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.asWkb(GeometryTestUtils.randomPoint()));
case CARTESIAN_POINT -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.asWkb(ShapeTestUtils.randomPoint()));
case GEO_SHAPE -> ((BytesRefBlock.Builder) builder).appendBytesRef(
GEO.asWkb(GeometryTestUtils.randomGeometry(randomBoolean()))
);
case CARTESIAN_SHAPE -> ((BytesRefBlock.Builder) builder).appendBytesRef(
CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean()))
);
case GEOHASH -> {
Point p = GeometryTestUtils.randomPoint();
((LongBlock.Builder) builder).appendLong(
Geohash.longEncode(p.getX(), p.getY(), randomIntBetween(1, Geohash.PRECISION))
);
}
case GEOTILE -> {
Point p = GeometryTestUtils.randomPoint();
((LongBlock.Builder) builder).appendLong(
GeoTileUtils.longEncode(p.getX(), p.getY(), randomIntBetween(0, GeoTileUtils.MAX_ZOOM))
);
}
case GEOHEX -> {
Point p = GeometryTestUtils.randomPoint();
((LongBlock.Builder) builder).appendLong(H3.geoToH3(p.getLat(), p.getLon(), randomIntBetween(1, H3.MAX_H3_RES)));
}
case AGGREGATE_METRIC_DOUBLE -> {
BlockLoader.AggregateMetricDoubleBuilder aggBuilder = (BlockLoader.AggregateMetricDoubleBuilder) builder;
aggBuilder.min().appendDouble(randomDouble());
aggBuilder.max().appendDouble(randomDouble());
aggBuilder.sum().appendDouble(randomDouble());
aggBuilder.count().appendInt(randomInt());
}
case NULL -> builder.appendNull();
case SOURCE -> {
try {
((BytesRefBlock.Builder) builder).appendBytesRef(
BytesReference.bytes(
JsonXContent.contentBuilder()
.startObject()
.field(randomAlphaOfLength(3), randomAlphaOfLength(10))
.endObject()
).toBytesRef()
);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
case DENSE_VECTOR -> {
BlockLoader.FloatBuilder floatBuilder = (BlockLoader.FloatBuilder) builder;
int dims = randomIntBetween(32, 64) * 2; // min 64 dims, always even
floatBuilder.beginPositionEntry();
for (int i = 0; i < dims; i++) {
floatBuilder.appendFloat(randomFloat());
}
floatBuilder.endPositionEntry();
}
case TSID_DATA_TYPE -> {
BytesRef tsIdValue = (BytesRef) EsqlTestUtils.randomLiteral(DataType.TSID_DATA_TYPE).value();
((BytesRefBlock.Builder) builder).appendBytesRef(tsIdValue);
}
case EXPONENTIAL_HISTOGRAM -> {
ExponentialHistogramBlockBuilder expBuilder = (ExponentialHistogramBlockBuilder) builder;
int valueCount = randomIntBetween(0, 500);
int bucketCount = randomIntBetween(4, Math.max(4, valueCount));
ExponentialHistogram histo = ExponentialHistogram.create(
bucketCount,
ExponentialHistogramCircuitBreaker.noop(),
randomDoubles(valueCount).toArray()
);
expBuilder.append(histo);
}
// default -> throw new UnsupportedOperationException("unsupported data type [" + c + "]");
}
return builder.build();
}).toArray(Block[]::new));
}
@Override
protected EsqlQueryResponse mutateInstance(EsqlQueryResponse instance) {
boolean allNull = true;
for (ColumnInfoImpl info : instance.columns()) {
// values inside NULL and UNSUPPORTED blocks cannot be mutated, because they are all null
if (info.type() != DataType.NULL && info.type() != DataType.UNSUPPORTED) {
allNull = false;
}
}
List<ColumnInfoImpl> columns = instance.columns();
List<Page> pages = deepCopyOfPages(instance);
long documentsFound = instance.documentsFound();
long valuesLoaded = instance.valuesLoaded();
EsqlQueryResponse.Profile profile = instance.profile();
boolean columnar = instance.columnar();
boolean isAsync = instance.isAsync();
EsqlExecutionInfo executionInfo = instance.getExecutionInfo();
switch (allNull ? between(0, 4) : between(0, 5)) {
case 0 -> {
int mutCol = between(0, instance.columns().size() - 1);
columns = new ArrayList<>(instance.columns());
// keep the type the same so the values are still valid but change the name
ColumnInfoImpl mut = columns.get(mutCol);
columns.set(mutCol, new ColumnInfoImpl(mut.name() + "mut", mut.type(), mut.originalTypes()));
}
case 1 -> documentsFound = randomValueOtherThan(documentsFound, ESTestCase::randomNonNegativeLong);
case 2 -> valuesLoaded = randomValueOtherThan(valuesLoaded, ESTestCase::randomNonNegativeLong);
case 3 -> columnar = false == columnar;
case 4 -> profile = randomValueOtherThan(profile, this::randomProfile);
case 5 -> {
assert allNull == false
: "can't replace values while preserving types if all pages are null - the only valid values are null";
int noPages = instance.pages().size();
List<Page> differentPages = List.of();
do {
differentPages.forEach(p -> Releasables.closeExpectNoException(p::releaseBlocks));
differentPages = randomList(noPages, noPages, () -> randomPage(instance.columns()));
} while (differentPages.equals(instance.pages()));
pages.forEach(Page::releaseBlocks);
pages = differentPages;
}
default -> throw new IllegalArgumentException();
}
return new EsqlQueryResponse(columns, pages, documentsFound, valuesLoaded, profile, columnar, isAsync, 0L, 0L, executionInfo);
}
private List<Page> deepCopyOfPages(EsqlQueryResponse response) {
List<Page> deepCopiedPages = new ArrayList<>(response.pages().size());
for (Page p : response.pages()) {
Block[] deepCopiedBlocks = new Block[p.getBlockCount()];
for (int b = 0; b < p.getBlockCount(); b++) {
deepCopiedBlocks[b] = BlockUtils.deepCopyOf(p.getBlock(b), blockFactory);
}
deepCopiedPages.add(new Page(deepCopiedBlocks));
}
assertThat(deepCopiedPages, equalTo(response.pages()));
return deepCopiedPages;
}
@Override
protected Writeable.Reader<EsqlQueryResponse> instanceReader() {
return EsqlQueryResponse.reader(blockFactory);
}
@Override
protected EsqlQueryResponse doParseInstance(XContentParser parser) {
return ResponseBuilder.fromXContent(parser);
}
/**
* Used to test round tripping through x-content. Unlike lots of other
* response objects, ESQL doesn't have production code that can parse
* the response because it doesn't need it. But we want to test random
* responses are valid. This helps with that by parsing it into a
* response.
*/
public static
|
EsqlQueryResponseTests
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/boot/model/JoinedSubclassPersistentEntity.java
|
{
"start": 644,
"end": 2811
}
|
class ____ extends SubclassPersistentEntity {
private final List<Attribute> attributes;
private String discriminatorValue;
private Key key;
public JoinedSubclassPersistentEntity(AuditTableData auditTableData, PersistentClass persistentClass) {
super( auditTableData, persistentClass );
this.attributes = new ArrayList<>();
}
@Override
public void addAttribute(Attribute attribute) {
attributes.add( attribute );
}
public void setKey(Key key) {
this.key = key;
}
public String getDiscriminatorValue() {
return discriminatorValue;
}
public void setDiscriminatorValue(String discriminatorValue) {
this.discriminatorValue = discriminatorValue;
}
// @Override
// public List<Join> getJoins() {
// throw new UnsupportedOperationException();
// }
//
// @Override
// public void addJoin(Join join) {
// throw new UnsupportedOperationException();
// }
@Override
public void build(JaxbHbmHibernateMapping mapping) {
mapping.getJoinedSubclass().add( buildJaxbMapping() );
}
public JaxbHbmJoinedSubclassEntityType buildJaxbMapping() {
final JaxbHbmJoinedSubclassEntityType entity = new JaxbHbmJoinedSubclassEntityType();
entity.setExtends( getExtends() );
// Set common stuff
if ( getPersistentClass() != null ) {
entity.setAbstract( getPersistentClass().isAbstract() );
}
if ( !StringTools.isEmpty(getAuditTableData().getAuditEntityName() ) ) {
entity.setEntityName( getAuditTableData().getAuditEntityName() );
}
if ( !StringTools.isEmpty( getAuditTableData().getAuditTableName() ) ) {
entity.setTable( getAuditTableData().getAuditTableName() );
}
if ( !StringTools.isEmpty( getAuditTableData().getSchema() ) ) {
entity.setSchema( getAuditTableData().getSchema() );
}
if ( !StringTools.isEmpty( getAuditTableData().getCatalog() ) ) {
entity.setCatalog( getAuditTableData().getCatalog() );
}
for ( Attribute attribute : attributes ) {
entity.getAttributes().add( attribute.build() );
}
entity.setKey( key.build() );
if ( !StringTools.isEmpty( discriminatorValue ) ) {
entity.setDiscriminatorValue( discriminatorValue );
}
return entity;
}
}
|
JoinedSubclassPersistentEntity
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.