language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java
|
{
"start": 10139,
"end": 11110
}
|
class ____ {
@GuardedBy("itself")
List<String> xs;
void f() {
// BUG: Diagnostic contains:
// should be guarded by 'this.xs'
this.xs.add("");
synchronized (this.xs) {
this.xs.add("");
}
synchronized (this.xs) {
xs.add("");
}
synchronized (xs) {
this.xs.add("");
}
synchronized (xs) {
xs.add("");
}
}
}
""")
.doTest();
}
@Test
public void methodQualifiedWithThis() {
compilationHelper
.addSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
import java.util.List;
import com.google.errorprone.annotations.concurrent.GuardedBy;
|
Itself
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
|
{
"start": 117130,
"end": 122940
}
|
/**
 * Planner rule that rewrites aggregate functions over union-typed (multi-typed) fields
 * whose underlying mappings include aggregate_metric_double, inserting the type
 * conversions needed so the aggregation can run over the mixed-typed field.
 * NOTE(review): relies on project-local types (Aggregate, FieldAttribute,
 * InvalidMappedField, ...) not visible here; comments below are drawn only from this block.
 */
class ____ extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan plan) {
// Only fire once the aggregate's children are resolved.
return plan.transformUp(Aggregate.class, p -> p.childrenResolved() == false ? p : doRule(p));
}
private LogicalPlan doRule(Aggregate plan) {
// Synthetic converted field attributes, deduplicated by a generated temporary name.
Map<String, FieldAttribute> unionFields = new HashMap<>();
// Set when any aggregate cannot be rewritten; the whole rule then becomes a no-op.
Holder<Boolean> aborted = new Holder<>(Boolean.FALSE);
var newPlan = plan.transformExpressionsOnly(AggregateFunction.class, aggFunc -> {
if (aggFunc.field() instanceof FieldAttribute fa && fa.field() instanceof InvalidMappedField mtf) {
// Only handle unions that mix aggregate_metric_double with otherwise numeric types.
if (mtf.types().contains(AGGREGATE_METRIC_DOUBLE) == false
|| mtf.types().stream().allMatch(f -> f == AGGREGATE_METRIC_DOUBLE || f.isNumeric()) == false) {
aborted.set(Boolean.TRUE);
return aggFunc;
}
Map<String, Expression> typeConverters = typeConverters(aggFunc, fa, mtf);
if (typeConverters == null) {
aborted.set(Boolean.TRUE);
return aggFunc;
}
var newField = unionFields.computeIfAbsent(
Attribute.rawTemporaryName(fa.name(), aggFunc.functionName(), aggFunc.sourceText()),
newName -> new FieldAttribute(
fa.source(),
fa.parentName(),
fa.qualifier(),
newName,
MultiTypeEsField.resolveFrom(mtf, typeConverters),
fa.nullable(),
null,
true
)
);
// Swap the aggregate's field argument (child 0) for the converted field.
List<Expression> children = new ArrayList<>(aggFunc.children());
children.set(0, newField);
return aggFunc.replaceChildren(children);
}
return aggFunc;
});
if (unionFields.isEmpty() || aborted.get()) {
return plan;
}
return ResolveUnionTypes.addGeneratedFieldsToEsRelations(newPlan, unionFields.values().stream().toList());
}
// Builds a per-type converter map for the union field, or null when the
// aggregate/type combination is unsupported (which aborts the rewrite above).
private Map<String, Expression> typeConverters(AggregateFunction aggFunc, FieldAttribute fa, InvalidMappedField mtf) {
var metric = getMetric(aggFunc);
if (metric == null) {
return null;
}
Map<String, Expression> typeConverter = new HashMap<>();
for (DataType type : mtf.types()) {
final ConvertFunction convert;
// Counting on aggregate metric double has unique behavior in that we cannot just provide the number of
// documents, instead we have to look inside the aggregate metric double's count field and sum those together.
// Grabbing the count value with FromAggregateMetricDouble the same way we do with min/max/sum would result in
// a single Int field, and incorrectly be treated as 1 document (instead of however many originally went into
// the aggregate metric double).
if (metric == AggregateMetricDoubleBlockBuilder.Metric.COUNT) {
convert = new ToAggregateMetricDouble(fa.source(), fa);
} else if (type == AGGREGATE_METRIC_DOUBLE) {
convert = FromAggregateMetricDouble.withMetric(aggFunc.source(), fa, metric);
} else if (type.isNumeric()) {
convert = new ToDouble(fa.source(), fa);
} else {
return null;
}
Expression expression = ResolveUnionTypes.typeSpecificConvert(convert, fa.source(), type, mtf);
typeConverter.put(type.typeName(), expression);
}
return typeConverter;
}
// Maps an aggregate function to the aggregate_metric_double sub-metric it reads,
// or null when the aggregate is unsupported over aggregate_metric_double.
private static AggregateMetricDoubleBlockBuilder.Metric getMetric(AggregateFunction aggFunc) {
if (aggFunc instanceof Max || aggFunc instanceof MaxOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.MAX;
}
if (aggFunc instanceof Min || aggFunc instanceof MinOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.MIN;
}
if (aggFunc instanceof Sum || aggFunc instanceof SumOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.SUM;
}
if (aggFunc instanceof Count || aggFunc instanceof CountOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.COUNT;
}
// NOTE(review): Avg/Present/Absent also map to COUNT — presumably only the
// count sub-field is needed for them; confirm against the aggregation impls.
if (aggFunc instanceof Avg || aggFunc instanceof AvgOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.COUNT;
}
if (aggFunc instanceof Present || aggFunc instanceof PresentOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.COUNT;
}
if (aggFunc instanceof Absent || aggFunc instanceof AbsentOverTime) {
return AggregateMetricDoubleBlockBuilder.Metric.COUNT;
}
return null;
}
}
/**
* Handle union types in UnionAll:
* 1. Push down explicit conversion functions into the UnionAll branches
* 2. Replace the explicit conversion functions with the corresponding attributes in the UnionAll output
* 3. Implicitly cast the outputs of the UnionAll branches to the common type, this applies to date and date_nanos types only
* 4. Update the attributes referencing the updated UnionAll output
*/
private static
|
ImplicitCastAggregateMetricDoubles
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 297376,
"end": 299739
}
|
class ____ the input data. Append a to the end of the name if you want the input to be an array type.", displayName = "Out Type"),
@YamlProperty(name = "param", type = "array:org.apache.camel.model.rest.ParamDefinition", description = "Information about parameters for this REST operation", displayName = "Param"),
@YamlProperty(name = "path", type = "string", description = "The path mapping URIs of this REST operation such as /{id}.", displayName = "Path"),
@YamlProperty(name = "produces", type = "string", description = "To define the content type what the REST service produces (uses for output), such as application/xml or application/json This option will override what may be configured on a parent level", displayName = "Produces"),
@YamlProperty(name = "responseMessage", type = "array:org.apache.camel.model.rest.ResponseMessageDefinition", description = "Response details for this REST operation", displayName = "Response Message"),
@YamlProperty(name = "routeId", type = "string", description = "Sets the id of the route", displayName = "Route Id"),
@YamlProperty(name = "security", type = "array:org.apache.camel.model.rest.SecurityDefinition", description = "Security settings for this REST operation", displayName = "Security"),
@YamlProperty(name = "skipBindingOnErrorCode", type = "boolean", defaultValue = "false", description = "Whether to skip binding on output if there is a custom HTTP error code header. This allows to build custom error messages that do not bind to json / xml etc, as success messages otherwise will do. This option will override what may be configured on a parent level", displayName = "Skip Binding On Error Code"),
@YamlProperty(name = "streamCache", type = "boolean", defaultValue = "false", description = "Whether stream caching is enabled on this rest operation.", displayName = "Stream Cache"),
@YamlProperty(name = "to", type = "object:org.apache.camel.model.ToDefinition", description = "The Camel endpoint this REST service will call, such as a direct endpoint to link to an existing route that handles this REST call.", displayName = "To"),
@YamlProperty(name = "type", type = "string", description = "Sets the
|
of
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/route/RouteDefinitionTest.java
|
{
"start": 867,
"end": 2295
}
|
class ____ {

    @Test
    public void addRouteDefinitionKeepsExistingMetadata() {
        // Seed the definition with one entry, then merge a second map into it.
        Map<String, Object> initial = Maps.newHashMap("key", "value");
        Map<String, Object> extra = Maps.newHashMap("key2", "value2");
        RouteDefinition definition = new RouteDefinition();
        definition.setMetadata(initial);
        definition.getMetadata().putAll(extra);
        // putAll must merge, keeping the original entry alongside the new one.
        assertThat(definition.getMetadata()).hasSize(2)
            .containsAllEntriesOf(initial)
            .containsAllEntriesOf(extra);
    }

    @Test
    public void setRouteDefinitionReplacesExistingMetadata() {
        // Calling the setter twice must replace, not merge, the metadata map.
        Map<String, Object> initial = Maps.newHashMap("key", "value");
        Map<String, Object> replacement = Maps.newHashMap("key2", "value2");
        RouteDefinition definition = new RouteDefinition();
        definition.setMetadata(initial);
        definition.setMetadata(replacement);
        assertThat(definition.getMetadata()).isEqualTo(replacement);
    }

    @Test
    public void addSingleMetadataEntryKeepsOriginalMetadata() {
        // Adding one entry through the live map keeps previous entries intact.
        Map<String, Object> initial = Maps.newHashMap("key", "value");
        RouteDefinition definition = new RouteDefinition();
        definition.setMetadata(initial);
        definition.getMetadata().put("key2", "value2");
        assertThat(definition.getMetadata()).hasSize(2)
            .containsAllEntriesOf(initial)
            .containsEntry("key2", "value2");
    }
}
|
RouteDefinitionTest
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/SpelCompilationCoverageTests.java
|
{
"start": 255164,
"end": 255723
}
|
class ____ {

    /** Accumulator written by the concat helpers; public by design for expression access. */
    public String s = null;

    /** Clears the accumulator back to null. */
    public void reset() {
        this.s = null;
    }

    /** Single-argument overload: stores the argument prefixed with "::". */
    public void concat(String arg) {
        this.s = "::" + arg;
    }

    /** Varargs overload: joins all arguments; a null array yields the empty string. */
    public void concat(String... vargs) {
        if (vargs == null) {
            this.s = "";
            return;
        }
        StringBuilder joined = new StringBuilder();
        for (String varg : vargs) {
            joined.append(varg);
        }
        this.s = joined.toString();
    }

    /** Object variant of the single-argument overload. */
    public void concat2(Object arg) {
        this.s = "::" + arg;
    }

    /** Object varargs variant: joins all arguments; a null array yields the empty string. */
    public void concat2(Object... vargs) {
        if (vargs == null) {
            this.s = "";
            return;
        }
        StringBuilder joined = new StringBuilder();
        for (Object varg : vargs) {
            joined.append(varg);
        }
        this.s = joined.toString();
    }
}
public static
|
TestClass10
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/client/protocol/decoder/RankedEntryDecoder.java
|
{
"start": 997,
"end": 1580
}
|
class ____ implements MultiDecoder<RankedEntry<?>> {

    @Override
    public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size) {
        // Replies alternate rank/score pairs: even positions decode as longs,
        // odd positions as doubles.
        return paramNum % 2 == 0
                ? LongCodec.INSTANCE.getValueDecoder()
                : DoubleCodec.INSTANCE.getValueDecoder();
    }

    @Override
    public RankedEntry<?> decode(List<Object> parts, State state) {
        // An empty reply means no entry was found.
        if (parts.isEmpty()) {
            return null;
        }
        Long rank = (Long) parts.get(0);
        Double score = (Double) parts.get(1);
        return new RankedEntry<>(rank.intValue(), score);
    }
}
|
RankedEntryDecoder
|
java
|
google__guice
|
core/src/com/google/inject/internal/MissingConstructorError.java
|
{
"start": 441,
"end": 2244
}
|
class ____ extends InternalErrorDetail<MissingConstructorError> {
private final TypeLiteral<?> type;
private final boolean atInjectRequired;
MissingConstructorError(TypeLiteral<?> type, boolean atInjectRequired, List<Object> sources) {
super(
ErrorId.MISSING_CONSTRUCTOR,
"No injectable constructor for type " + type + ".",
sources,
null);
this.type = type;
this.atInjectRequired = atInjectRequired;
}
@Override
public boolean isMergeable(ErrorDetail<?> other) {
if (other instanceof MissingConstructorError) {
MissingConstructorError otherMissing = (MissingConstructorError) other;
return Objects.equal(type, otherMissing.type)
&& Objects.equal(atInjectRequired, otherMissing.atInjectRequired);
}
return false;
}
@Override
protected void formatDetail(List<ErrorDetail<?>> mergeableErrors, Formatter formatter) {
formatter.format("\n");
Class<?> rawType = type.getRawType();
if (atInjectRequired) {
formatter.format(
"Injector is configured to require @Inject constructors but %s does not have a @Inject"
+ " annotated constructor.\n",
rawType);
} else {
Constructor<?> noArgConstructor = null;
try {
noArgConstructor = type.getRawType().getDeclaredConstructor();
} catch (NoSuchMethodException e) {
// Ignore
}
if (noArgConstructor == null) {
formatter.format(
"%s does not have a @Inject annotated constructor or a no-arg constructor.\n", rawType);
} else if (Modifier.isPrivate(noArgConstructor.getModifiers())
&& !Modifier.isPrivate(rawType.getModifiers())) {
formatter.format(
"%s has a private no-arg constructor but the
|
MissingConstructorError
|
java
|
elastic__elasticsearch
|
libs/geo/src/main/java/org/elasticsearch/geometry/LinearRing.java
|
{
"start": 735,
"end": 2397
}
|
class ____ extends Line {

    /** Shared empty ring instance. */
    public static final LinearRing EMPTY = new LinearRing();

    private LinearRing() {}

    public LinearRing(double[] x, double[] y) {
        this(x, y, null);
    }

    /**
     * Builds a ring from coordinate arrays; {@code z} may be null for 2D rings.
     * The ring must contain at least two points and must close itself
     * (first point equal to last point on every axis).
     */
    public LinearRing(double[] x, double[] y, double[] z) {
        super(x, y, z);
        if (x.length < 2) {
            throw new IllegalArgumentException("linear ring cannot contain less than 2 points, found " + x.length);
        }
        int last = x.length - 1;
        // NaN coordinates never compare equal, so they also fail this closure check.
        boolean closed = x[0] == x[last] && y[0] == y[last] && (z == null || z[0] == z[last]);
        if (closed == false) {
            throw new IllegalArgumentException(
                "first and last points of the linear ring must be the same (it must close itself):"
                    + " x[0]=" + x[0]
                    + " x[" + last + "]=" + x[last]
                    + " y[0]=" + y[0]
                    + " y[" + last + "]=" + y[last]
                    + (z == null ? "" : " z[0]=" + z[0] + " z[" + last + "]=" + z[last])
            );
        }
    }

    @Override
    public ShapeType type() {
        return ShapeType.LINEARRING;
    }

    @Override
    public <T, E extends Exception> T visit(GeometryVisitor<T, E> visitor) throws E {
        return visitor.visit(this);
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("linearring(x=");
        text.append(Arrays.toString(getX()));
        text.append(", y=").append(Arrays.toString(getY()));
        if (hasZ()) {
            text.append(", z=").append(Arrays.toString(getZ()));
        }
        return text.append(")").toString();
    }
}
|
LinearRing
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/internals/AdminFetchMetricsManager.java
|
{
"start": 953,
"end": 1470
}
|
class ____ {

    private final Metrics metrics;

    public AdminFetchMetricsManager(Metrics metrics) {
        this.metrics = metrics;
    }

    /**
     * Records the request latency against the per-node latency sensor, if one
     * has been registered; empty node ids and unknown sensors are ignored.
     */
    public void recordLatency(String node, long requestLatencyMs) {
        if (node.isEmpty()) {
            return;
        }
        Sensor sensor = this.metrics.getSensor("node-" + node + ".latency");
        if (sensor != null) {
            sensor.record(requestLatencyMs);
        }
    }
}
|
AdminFetchMetricsManager
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/event/AbstractApplicationEventListenerTests.java
|
{
"start": 963,
"end": 1441
}
|
class ____ {
// Resolves the declared generic type of the named field on TestEvents; used by
// subclasses to build ResolvableType fixtures for listener tests.
protected ResolvableType getGenericApplicationEventType(String fieldName) {
try {
return ResolvableType.forField(TestEvents.class.getField(fieldName));
}
catch (NoSuchFieldException ex) {
// A missing field is a test-authoring error, not a runtime condition.
throw new IllegalStateException("No such field on Events '" + fieldName + "'");
}
}
// Convenience factory: wraps the payload in a GenericTestEvent sourced from this test.
protected <T> GenericTestEvent<T> createGenericTestEvent(T payload) {
return new GenericTestEvent<>(this, payload);
}
protected static
|
AbstractApplicationEventListenerTests
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/sample/AuditableUser.java
|
{
"start": 1372,
"end": 2100
}
|
/**
 * Sample auditable entity used in repository tests; id and audit fields come
 * from AbstractAuditable, only the first name and roles are declared here.
 */
class ____ extends AbstractAuditable<AuditableUser, Integer> {
private String firstname;
// Roles cascade on persist/merge; the collection instance is final and never replaced.
@ManyToMany(
cascade = { CascadeType.PERSIST, CascadeType.MERGE }) private final Set<AuditableRole> roles = new HashSet<>();
// No-arg constructor required by JPA; chains to the id-only constructor with a null id.
public AuditableUser() {
this(null);
}
public AuditableUser(@Nullable Integer id) {
this(id, null);
}
public AuditableUser(@Nullable Integer id, String firstname) {
setId(id);
this.firstname = firstname;
}
public String getFirstname() {
return firstname;
}
public void setFirstname(final String firstname) {
this.firstname = firstname;
}
public void addRole(AuditableRole role) {
this.roles.add(role);
}
public Set<AuditableRole> getRoles() {
// NOTE(review): exposes the internal mutable set directly; callers can modify it.
return roles;
}
}
|
AuditableUser
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/FieldMissingNullableTest.java
|
{
"start": 17048,
"end": 17183
}
|
interface ____ {}
}
""")
.addOutputLines(
"out/Test.java",
"""
|
Nullable
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanForBrokenFactoryBeanIntegrationTests.java
|
{
"start": 1895,
"end": 2207
}
|
/**
 * FactoryBean stub whose constructor always fails, simulating a bean whose
 * dependencies are missing at creation time.
 */
class ____ implements FactoryBean<TestBean> {
TestFactoryBean() {
// Fail fast: the tests exercise behavior when the factory cannot be constructed.
throw new BeanCreationException("simulating missing dependencies");
}
@Override
public TestBean getObject() {
// Never reached at runtime, since construction always throws.
return () -> "prod";
}
@Override
public Class<?> getObjectType() {
return TestBean.class;
}
}
|
TestFactoryBean
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/relational/ContributableDatabaseObject.java
|
{
"start": 387,
"end": 463
}
|
/**
 * Marker for database objects that are both attributable to a contributor
 * (Contributable) and exportable to the database schema (Exportable).
 * Declares no additional operations.
 */
interface ____ extends Contributable, Exportable {
}
|
ContributableDatabaseObject
|
java
|
alibaba__nacos
|
common/src/test/java/com/alibaba/nacos/common/cache/impl/SimpleCacheTest.java
|
{
"start": 961,
"end": 1604
}
|
class ____ {
@Test
void test() throws Exception {
// Build a cache sized for the 100 sequential entries inserted below.
Cache cache = CacheBuilder.builder().initializeCapacity(100).build();
IntStream.range(0, 100).forEach(item -> cache.put(item, item));
assertEquals(100, cache.getSize());
// remove returns the evicted value and shrinks the cache.
Object item = cache.remove(89);
assertEquals(89, item);
assertEquals(99, cache.getSize());
assertNull(cache.get(89));
assertEquals(99, cache.get(99));
// The supplier is only consulted on a miss: 99 is present, 111 is not.
assertEquals(99, cache.get(99, () -> 99999));
assertEquals(87, cache.get(111, () -> 87));
cache.clear();
assertEquals(0, cache.getSize());
}
}
|
SimpleCacheTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
|
{
"start": 5420,
"end": 6416
}
|
class ____ extends ValuesSource {
@Override
public DocValueBits docsWithValue(LeafReaderContext context) throws IOException {
// Presence is derived from the per-leaf binary doc values of this source.
final SortedBinaryDocValues bytes = bytesValues(context);
return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes);
}
@Override
public final Function<Rounding, Rounding.Prepared> roundingPreparer(AggregationContext context) throws IOException {
// Rounding applies to numeric/date sources only; byte values cannot be rounded.
throw AggregationErrors.unsupportedRounding("BYTES");
}
/**
* Specialization of {@linkplain Bytes} who's underlying storage
* de-duplicates its bytes by storing them in a per-leaf sorted
* lookup table. Aggregations that are aware of these lookup tables
* can operate directly on the value's position in the table, know as
* the "ordinal". They can then later translate the ordinal into
* the {@link BytesRef} value.
*/
public abstract static
|
Bytes
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/hhh13712/HHH13712Test.java
|
{
"start": 2779,
"end": 2897
}
|
class ____ {
// Primary key with no generation strategy: ids are assigned by the caller.
@Id
@Column
Long id;
// No-arg constructor required by JPA.
SomeOther() {
}
SomeOther(Long id) {
this.id = id;
}
}
}
|
SomeOther
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/executor/RedissonExecutorServiceSpringTest.java
|
{
"start": 1514,
"end": 1855
}
|
/**
 * Serializable task submitted to the Redisson executor.
 * NOTE(review): the @Autowired collaborator is presumably re-injected on the
 * worker side after deserialization by Redisson's Spring integration — confirm
 * against the surrounding test configuration.
 */
class ____ implements Callable<String>, Serializable {
@Autowired
private SampleBean bean;
public SampleCallable() {
}
@Override
public String call() throws Exception {
return bean.myMethod("callable");
}
}
@Service
public static
|
SampleCallable
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/JsonAliasWithDeduction4327Test.java
|
{
"start": 1065,
"end": 1981
}
|
/** Deduction variant identified purely by its aliased {@code y} property. */
class ____ implements Deduction {
// add "y" as redundant choice to make sure it won't break anything
@JsonAlias(value = {"y", "Y", "yy", "ff", "X"})
public int y;
// IMPORTANT! Can have field and setter, but alias values are not merged;
// highest priority one is used instead of lower if both defined (Setter
// having higher priority than Field)
public void setY(int y) { this.y = y; }
}
private final ObjectMapper mapper = jsonMapperBuilder().build();
@ParameterizedTest
@ValueSource(strings = {"y", "Y", "yy", "ff", "X"})
public void testAliasWithPolymorphicDeduction(String field) throws Exception {
String json = a2q(String.format("{'%s': 2 }", field));
Deduction value = mapper.readValue(json, Deduction.class);
assertNotNull(value);
assertEquals(2, ((DeductionBean2) value).y);
}
}
|
DeductionBean2
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/naturalid/BasicNaturalIdCachingTests.java
|
{
"start": 1529,
"end": 3689
}
|
class ____ {
@Test
public void testMapping(SessionFactoryScope scope) {
// The natural-id mapping must expose a second-level cache access for CachedEntity.
final NaturalIdDataAccess cacheAccess = resolveCacheAccess( scope );
assertThat( cacheAccess, notNullValue() );
}
// Looks up the natural-id cache access for CachedEntity via the factory's mapping metamodel.
private NaturalIdDataAccess resolveCacheAccess(SessionFactoryScope scope) {
final SessionFactoryImplementor sessionFactory = scope.getSessionFactory();
final EntityPersister entityPersister = sessionFactory.getMappingMetamodel().getEntityDescriptor( CachedEntity.class );
return entityPersister.getNaturalIdMapping().getCacheAccess();
}
@Test
public void testCreationCaching(SessionFactoryScope scope) {
final SessionFactoryImplementor sessionFactory = scope.getSessionFactory();
final StatisticsImplementor statistics = sessionFactory.getStatistics();
// Start from a clean slate so the put/hit/miss counters below are exact.
statistics.clear();
scope.inTransaction(
(session) -> {
session.persist( new CachedEntity( 1, "abc", "the entity" ) );
}
);
// Persisting should populate the natural-id cache exactly once, with no reads yet.
final NaturalIdStatistics cachedStats = statistics.getNaturalIdStatistics( CachedEntity.class.getName() );
assertThat( cachedStats, notNullValue() );
assertThat( cachedStats.getCacheHitCount(), is( 0L ) );
assertThat( cachedStats.getCacheMissCount(), is( 0L ) );
assertThat( cachedStats.getCachePutCount(), is( 1L ) );
scope.inTransaction(
(session) -> {
final EntityPersister entityPersister = sessionFactory.getMappingMetamodel().getEntityDescriptor( CachedEntity.class );
final NaturalIdDataAccess cacheAccess = resolveCacheAccess( scope );
final Object cacheKey = cacheAccess.generateCacheKey( "abc", entityPersister, session );
// The cached value for natural id "abc" is the entity's identifier (1).
final Object cached = cacheAccess.get( session, cacheKey );
assertThat( cached, notNullValue() );
assertThat( cached, equalTo( 1 ) );
}
);
}
// Resets both the database tables and the second-level cache between tests.
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
// make sure the data is not in the L2 cache
scope.getSessionFactory().getCache().evictAllRegions();
scope.getSessionFactory().getCache().evictNaturalIdData();
}
@Entity( name = "CachedEntity" )
@Table( name = "natural_id_cached" )
@NaturalIdCache
public static
|
BasicNaturalIdCachingTests
|
java
|
apache__camel
|
components/camel-univocity-parsers/src/main/java/org/apache/camel/dataformat/univocity/AbstractUniVocityDataFormat.java
|
{
"start": 1985,
"end": 12086
}
|
/**
 * Base class for the uniVocity-backed Camel data formats. Holds the shared
 * configuration, lazily builds the writer/parser settings plus the
 * marshaller/unmarshaller on first use, and delegates format-specific object
 * creation to the concrete subclasses.
 *
 * NOTE(review): the lazy initialization in marshal/unmarshal is unsynchronized;
 * concurrent first calls could each build the settings. Confirm the data format
 * is effectively initialized from a single thread before relying on this.
 */
class ____<
F extends Format, CWS extends CommonWriterSettings<F>,
W extends AbstractWriter<CWS>, CPS extends CommonParserSettings<F>, P extends AbstractParser<CPS>,
DF extends AbstractUniVocityDataFormat<F, CWS, W, CPS, P, DF>>
extends ServiceSupport
implements DataFormat, DataFormatName {
// Common settings shared by parser and writer; null means "use the library default".
protected String nullValue;
protected Boolean skipEmptyLines;
protected Boolean ignoreTrailingWhitespaces;
protected Boolean ignoreLeadingWhitespaces;
// When true, headers are explicitly cleared even if 'headers' is also set.
protected boolean headersDisabled;
// Comma-separated header names; split (untrimmed) by headersAsArray().
protected String headers;
protected Boolean headerExtractionEnabled;
protected Integer numberOfRecordsToRead;
protected String emptyValue;
protected String lineSeparator;
protected Character normalizedLineSeparator;
protected Character comment;
protected boolean lazyLoad;
protected boolean asMap;
// Lazily created on first marshal/unmarshal and reset in doStart().
private volatile CWS writerSettings;
private volatile Marshaller<W> marshaller;
private volatile CPS parserSettings;
private volatile Unmarshaller<P> unmarshaller;
private final HeaderRowProcessor headerRowProcessor = new HeaderRowProcessor();
@Override
public void marshal(Exchange exchange, Object body, OutputStream stream) throws Exception {
// Build the cached settings/marshaller on first use (see thread-safety note above).
if (writerSettings == null) {
writerSettings = createAndConfigureWriterSettings();
}
if (marshaller == null) {
marshaller = new Marshaller<>(headersAsArray(), headers == null);
}
try (Writer writer = new OutputStreamWriter(stream, getCharsetName(exchange))) {
marshaller.marshal(exchange, body, createWriter(writer, writerSettings));
}
}
@Override
public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
if (parserSettings == null) {
parserSettings = createAndConfigureParserSettings();
}
if (unmarshaller == null) {
unmarshaller = new Unmarshaller<>(lazyLoad, asMap);
}
P parser = createParser(parserSettings);
// univocity-parsers is responsible for closing the reader, even in case of error
Reader reader = new InputStreamReader(stream, getCharsetName(exchange));
return unmarshaller.unmarshal(reader, parser, headerRowProcessor);
}
// Splits the comma-separated header list; returns null when no headers were set.
// NOTE(review): entries are not trimmed, so spaces around commas become part of the names.
String[] headersAsArray() {
if (headers != null) {
return headers.split(",");
} else {
return null;
}
}
/**
* Creates a new instance of the writer settings.
*
* @return New instance of the writer settings
*/
protected abstract CWS createWriterSettings();
/**
* Configures the writer settings.
*
* @param settings Writer settings to configure
*/
protected void configureWriterSettings(CWS settings) {
configureCommonSettings(settings);
if (emptyValue != null) {
settings.setEmptyValue(emptyValue);
}
}
/**
* Creates a new instance of the uniVocity writer.
*
* @param writer Output writer to use
* @param settings Writer settings to use
* @return New instance of the uniVocity writer
*/
protected abstract W createWriter(Writer writer, CWS settings);
/**
* Creates a new instance of the parser settings.
*
* @return New instance of the parser settings
*/
protected abstract CPS createParserSettings();
/**
* Configure the parser settings.
*
* @param settings Parser settings to configure
*/
protected void configureParserSettings(CPS settings) {
configureCommonSettings(settings);
if (headerExtractionEnabled != null) {
settings.setHeaderExtractionEnabled(headerExtractionEnabled);
}
if (numberOfRecordsToRead != null) {
settings.setNumberOfRecordsToRead(numberOfRecordsToRead);
}
}
/**
* Creates a new instance of the uniVocity parser.
*
* @param settings Parser settings to use
* @return New instance of the uniVocity parser
*/
protected abstract P createParser(CPS settings);
/**
* Configures the format.
*
* @param format format to configure
*/
protected void configureFormat(F format) {
if (lineSeparator != null) {
format.setLineSeparator(lineSeparator);
}
if (normalizedLineSeparator != null) {
format.setNormalizedNewline(normalizedLineSeparator);
}
if (comment != null) {
format.setComment(comment);
}
}
/**
* Creates and configures the writer settings.
*
* @return new configured instance of the writer settings
*/
final CWS createAndConfigureWriterSettings() {
CWS settings = createWriterSettings();
configureWriterSettings(settings);
configureFormat(settings.getFormat());
return settings;
}
/**
* Creates and configures the parser settings.
*
* @return new configured instance of the parser settings
*/
final CPS createAndConfigureParserSettings() {
CPS settings = createParserSettings();
configureParserSettings(settings);
configureFormat(settings.getFormat());
settings.setProcessor(headerRowProcessor);
return settings;
}
/**
* Configures the common settings shared by parser and writer.
*
* @param settings settings to configure
*/
private void configureCommonSettings(CommonSettings<F> settings) {
if (nullValue != null) {
settings.setNullValue(nullValue);
}
if (skipEmptyLines != null) {
settings.setSkipEmptyLines(skipEmptyLines);
}
if (ignoreTrailingWhitespaces != null) {
settings.setIgnoreTrailingWhitespaces(ignoreTrailingWhitespaces);
}
if (ignoreLeadingWhitespaces != null) {
settings.setIgnoreLeadingWhitespaces(ignoreLeadingWhitespaces);
}
// Disabling headers takes precedence over any configured header list.
if (headersDisabled) {
settings.setHeaders((String[]) null);
} else if (headers != null) {
settings.setHeaders(headersAsArray());
}
}
public String getNullValue() {
return nullValue;
}
public void setNullValue(String nullValue) {
this.nullValue = nullValue;
}
public Boolean getSkipEmptyLines() {
return skipEmptyLines;
}
public void setSkipEmptyLines(Boolean skipEmptyLines) {
this.skipEmptyLines = skipEmptyLines;
}
public Boolean getIgnoreTrailingWhitespaces() {
return ignoreTrailingWhitespaces;
}
public void setIgnoreTrailingWhitespaces(Boolean ignoreTrailingWhitespaces) {
this.ignoreTrailingWhitespaces = ignoreTrailingWhitespaces;
}
public Boolean getIgnoreLeadingWhitespaces() {
return ignoreLeadingWhitespaces;
}
public void setIgnoreLeadingWhitespaces(Boolean ignoreLeadingWhitespaces) {
this.ignoreLeadingWhitespaces = ignoreLeadingWhitespaces;
}
public boolean isHeadersDisabled() {
return headersDisabled;
}
public void setHeadersDisabled(boolean headersDisabled) {
this.headersDisabled = headersDisabled;
}
public String getHeaders() {
return headers;
}
public void setHeaders(String headers) {
this.headers = headers;
}
public Boolean getHeaderExtractionEnabled() {
return headerExtractionEnabled;
}
public void setHeaderExtractionEnabled(Boolean headerExtractionEnabled) {
this.headerExtractionEnabled = headerExtractionEnabled;
}
public Integer getNumberOfRecordsToRead() {
return numberOfRecordsToRead;
}
public void setNumberOfRecordsToRead(Integer numberOfRecordsToRead) {
this.numberOfRecordsToRead = numberOfRecordsToRead;
}
public String getEmptyValue() {
return emptyValue;
}
public void setEmptyValue(String emptyValue) {
this.emptyValue = emptyValue;
}
public String getLineSeparator() {
return lineSeparator;
}
public void setLineSeparator(String lineSeparator) {
this.lineSeparator = lineSeparator;
}
public Character getNormalizedLineSeparator() {
return normalizedLineSeparator;
}
public void setNormalizedLineSeparator(Character normalizedLineSeparator) {
this.normalizedLineSeparator = normalizedLineSeparator;
}
public Character getComment() {
return comment;
}
public void setComment(Character comment) {
this.comment = comment;
}
public boolean isLazyLoad() {
return lazyLoad;
}
public void setLazyLoad(boolean lazyLoad) {
this.lazyLoad = lazyLoad;
}
public boolean isAsMap() {
return asMap;
}
public void setAsMap(boolean asMap) {
this.asMap = asMap;
}
public CWS getWriterSettings() {
return writerSettings;
}
public void setWriterSettings(CWS writerSettings) {
this.writerSettings = writerSettings;
}
public Marshaller<W> getMarshaller() {
return marshaller;
}
public void setMarshaller(Marshaller<W> marshaller) {
this.marshaller = marshaller;
}
public CPS getParserSettings() {
return parserSettings;
}
public void setParserSettings(CPS parserSettings) {
this.parserSettings = parserSettings;
}
public Unmarshaller<P> getUnmarshaller() {
return unmarshaller;
}
public void setUnmarshaller(Unmarshaller<P> unmarshaller) {
this.unmarshaller = unmarshaller;
}
public HeaderRowProcessor getHeaderRowProcessor() {
return headerRowProcessor;
}
@Override
protected void doStart() throws Exception {
// Drop cached settings/marshallers so configuration changes take effect on (re)start.
writerSettings = null;
marshaller = null;
parserSettings = null;
unmarshaller = null;
}
@Override
protected void doStop() throws Exception {
// noop
}
}
|
AbstractUniVocityDataFormat
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ExcludeConfigBuildItem.java
|
{
"start": 915,
"end": 1471
}
|
/**
 * Build item identifying a native-image configuration resource to exclude
 * from a given jar during native image generation.
 */
class ____ extends MultiBuildItem {
private final String jarFile;
private final String resourceName;
public ExcludeConfigBuildItem(String jarFile, String resourceName) {
this.jarFile = jarFile;
this.resourceName = resourceName;
}
public ExcludeConfigBuildItem(String jarFile) {
// NOTE(review): the escaped dot suggests resourceName is matched as a regex
// pattern rather than a literal path — confirm against the consumer of this item.
this(jarFile, "/META-INF/native-image/native-image\\.properties");
}
public String getJarFile() {
return jarFile;
}
public String getResourceName() {
return resourceName;
}
}
|
ExcludeConfigBuildItem
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/model/DefaultDependencyManagementInjector.java
|
{
"start": 1989,
"end": 4627
}
|
/**
 * Applies a model's dependencyManagement section to its declared dependencies,
 * merging managed values into matching dependencies (matched by dependency key).
 */
class ____ extends MavenModelMerger {
public Model mergeManagedDependencies(Model model) {
DependencyManagement dependencyManagement = model.getDependencyManagement();
if (dependencyManagement != null) {
// Index the declared dependencies by their key (as computed by getDependencyKey()).
Map<Object, Dependency> dependencies = new HashMap<>();
Map<Object, Object> context = Collections.emptyMap();
for (Dependency dependency : model.getDependencies()) {
Object key = getDependencyKey().apply(dependency);
dependencies.put(key, dependency);
}
boolean modified = false;
for (Dependency managedDependency : dependencyManagement.getDependencies()) {
Object key = getDependencyKey().apply(managedDependency);
Dependency dependency = dependencies.get(key);
if (dependency != null) {
// sourceDominant == false: the declared dependency's values win over managed ones.
Dependency merged = mergeDependency(dependency, managedDependency, false, context);
if (merged != dependency) {
dependencies.put(key, merged);
modified = true;
}
}
}
if (modified) {
// Rebuild the list in original declaration order, substituting merged instances.
List<Dependency> newDeps = new ArrayList<>(dependencies.size());
for (Dependency dep : model.getDependencies()) {
Object key = getDependencyKey().apply(dep);
Dependency dependency = dependencies.get(key);
newDeps.add(dependency);
}
return Model.newBuilder(model).dependencies(newDeps).build();
}
}
return model;
}
@Override
protected void mergeDependency_Optional(
Dependency.Builder builder,
Dependency target,
Dependency source,
boolean sourceDominant,
Map<Object, Object> context) {
// optional flag is not managed
}
@Override
protected void mergeDependency_Exclusions(
Dependency.Builder builder,
Dependency target,
Dependency source,
boolean sourceDominant,
Map<Object, Object> context) {
// Exclusions are only inherited from the managed dependency when the
// declared dependency defines none of its own.
List<Exclusion> tgt = target.getExclusions();
if (tgt.isEmpty()) {
List<Exclusion> src = source.getExclusions();
builder.exclusions(src);
}
}
}
}
|
ManagementModelMerger
|
java
|
elastic__elasticsearch
|
plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java
|
{
"start": 1329,
"end": 3082
}
|
class ____ extends Plugin implements AnalysisPlugin, MapperPlugin {
@Override
public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
return singletonMap("icu_normalizer", IcuNormalizerCharFilterFactory::new);
}
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
extra.put("icu_normalizer", IcuNormalizerTokenFilterFactory::new);
extra.put("icu_folding", IcuFoldingTokenFilterFactory::new);
extra.put("icu_collation", IcuCollationTokenFilterFactory::new);
extra.put("icu_transform", IcuTransformTokenFilterFactory::new);
return extra;
}
@Override
public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
return singletonMap("icu_analyzer", IcuAnalyzerProvider::new);
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("icu_tokenizer", IcuTokenizerFactory::new);
}
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(ICUCollationKeywordFieldMapper.CONTENT_TYPE, ICUCollationKeywordFieldMapper.PARSER);
}
@Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
return Collections.singletonList(
new NamedWriteableRegistry.Entry(
DocValueFormat.class,
ICUCollationKeywordFieldMapper.CollationFieldType.COLLATE_FORMAT.getWriteableName(),
in -> ICUCollationKeywordFieldMapper.CollationFieldType.COLLATE_FORMAT
)
);
}
}
|
AnalysisICUPlugin
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/dynamic/support/ReflectionUtils.java
|
{
"start": 8064,
"end": 9466
}
|
class ____ introspect
* @param mc the callback to invoke for each method
* @param mf the filter that determines the methods to apply the callback to
*/
public static void doWithMethods(Class<?> clazz, MethodCallback mc, MethodFilter mf) {
// Keep backing up the inheritance hierarchy.
Method[] methods = getDeclaredMethods(clazz);
for (Method method : methods) {
if (mf != null && !mf.matches(method)) {
continue;
}
try {
mc.doWith(method);
} catch (IllegalAccessException ex) {
throw new IllegalStateException("Not allowed to access method '" + method.getName() + "': " + ex);
}
}
if (clazz.getSuperclass() != null) {
doWithMethods(clazz.getSuperclass(), mc, mf);
} else if (clazz.isInterface()) {
for (Class<?> superIfc : clazz.getInterfaces()) {
doWithMethods(superIfc, mc, mf);
}
}
}
/**
* This variant retrieves {@link Class#getDeclaredMethods()} from a local cache in order to avoid the JVM's SecurityManager
* check and defensive array copying. In addition, it also includes Java 8 default methods from locally implemented
* interfaces, since those are effectively to be treated just like declared methods.
*
* @param clazz the
|
to
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/bulk/StatusCode.java
|
{
"start": 11616,
"end": 17221
}
|
enum ____ {
ALL_OR_NONE_OPERATION_ROLLED_BACK,
ALREADY_IN_PROCESS,
ASSIGNEE_TYPE_REQUIRED,
BAD_CUSTOM_ENTITY_PARENT_DOMAIN,
BCC_NOT_ALLOWED_IF_BCC_COMPLIANCE_ENABLED,
CANNOT_CASCADE_PRODUCT_ACTIVE,
CANNOT_CHANGE_FIELD_TYPE_OF_APEX_REFERENCED_FIELD,
CANNOT_CREATE_ANOTHER_MANAGED_PACKAGE,
CANNOT_DEACTIVATE_DIVISION,
CANNOT_DELETE_LAST_DATED_CONVERSION_RATE,
CANNOT_DELETE_MANAGED_OBJECT,
CANNOT_DISABLE_LAST_ADMIN,
CANNOT_ENABLE_IP_RESTRICT_REQUESTS,
CANNOT_INSERT_UPDATE_ACTIVATE_ENTITY,
CANNOT_MODIFY_MANAGED_OBJECT,
CANNOT_RENAME_APEX_REFERENCED_FIELD,
CANNOT_RENAME_APEX_REFERENCED_OBJECT,
CANNOT_REPARENT_RECORD,
CANNOT_UPDATE_CONVERTED_LEAD,
CANT_DISABLE_CORP_CURRENCY,
CANT_UNSET_CORP_CURRENCY,
CHILD_SHARE_FAILS_PARENT,
CIRCULAR_DEPENDENCY,
COMMUNITY_NOT_ACCESSIBLE,
CUSTOM_CLOB_FIELD_LIMIT_EXCEEDED,
CUSTOM_ENTITY_OR_FIELD_LIMIT,
CUSTOM_FIELD_INDEX_LIMIT_EXCEEDED,
CUSTOM_INDEX_EXISTS,
CUSTOM_LINK_LIMIT_EXCEEDED,
CUSTOM_TAB_LIMIT_EXCEEDED,
DELETE_FAILED,
DELETE_OPERATION_TOO_LARGE,
DELETE_REQUIRED_ON_CASCADE,
DEPENDENCY_EXISTS,
DUPLICATE_CASE_SOLUTION,
DUPLICATE_COMM_NICKNAME,
DUPLICATE_CUSTOM_ENTITY_DEFINITION,
DUPLICATE_CUSTOM_TAB_MOTIF,
DUPLICATE_DEVELOPER_NAME,
DUPLICATE_EXTERNAL_ID,
DUPLICATE_MASTER_LABEL,
DUPLICATE_SENDER_DISPLAY_NAME,
DUPLICATE_USERNAME,
DUPLICATE_VALUE,
EMAIL_NOT_PROCESSED_DUE_TO_PRIOR_ERROR,
EMPTY_SCONTROL_FILE_NAME,
ENTITY_FAILED_IFLASTMODIFIED_ON_UPDATE,
ENTITY_IS_ARCHIVED,
ENTITY_IS_DELETED,
ENTITY_IS_LOCKED,
ERROR_IN_MAILER,
EXTERNAL_OBJECT_AUTHENTICATION_EXCEPTION,
EXTERNAL_OBJECT_CONNECTION_EXCEPTION,
EXTERNAL_OBJECT_EXCEPTION,
EXTERNAL_OBJECT_UNSUPPORTED_EXCEPTION,
FAILED_ACTIVATION,
FIELD_CUSTOM_VALIDATION_EXCEPTION,
FIELD_FILTER_VALIDATION_EXCEPTION,
FIELD_INTEGRITY_EXCEPTION,
FILTERED_LOOKUP_LIMIT_EXCEEDED,
HTML_FILE_UPLOAD_NOT_ALLOWED,
IMAGE_TOO_LARGE,
INACTIVE_OWNER_OR_USER,
INSUFFICIENT_ACCESS_ON_CROSS_REFERENCE_ENTITY,
INSUFFICIENT_ACCESS_OR_READONLY,
INVALID_ACCESS_LEVEL,
INVALID_ARGUMENT_TYPE,
INVALID_ASSIGNEE_TYPE,
INVALID_ASSIGNMENT_RULE,
INVALID_BATCH_OPERATION,
INVALID_CONTENT_TYPE,
INVALID_CREDIT_CARD_INFO,
INVALID_CROSS_REFERENCE_KEY,
INVALID_CROSS_REFERENCE_TYPE_FOR_FIELD,
INVALID_CURRENCY_CONV_RATE,
INVALID_CURRENCY_CORP_RATE,
INVALID_CURRENCY_ISO,
INVALID_DATA_CATEGORY_GROUP_REFERENCE,
INVALID_DATA_URI,
INVALID_EMAIL_ADDRESS,
INVALID_EMPTY_KEY_OWNER,
INVALID_FIELD,
INVALID_FIELD_FOR_INSERT_UPDATE,
INVALID_FIELD_WHEN_USING_TEMPLATE,
INVALID_FILTER_ACTION,
INVALID_GOOGLE_DOCS_URL,
INVALID_ID_FIELD,
INVALID_INET_ADDRESS,
INVALID_LINEITEM_CLONE_STATE,
INVALID_MASTER_OR_TRANSLATED_SOLUTION,
INVALID_MESSAGE_ID_REFERENCE,
INVALID_OPERATION,
INVALID_OPERATOR,
INVALID_OR_NULL_FOR_RESTRICTED_PICKLIST,
INVALID_PACKAGE_VERSION,
INVALID_PARTNER_NETWORK_STATUS,
INVALID_PERSON_ACCOUNT_OPERATION,
INVALID_QUERY_LOCATOR,
INVALID_READ_ONLY_USER_DML,
INVALID_SAVE_AS_ACTIVITY_FLAG,
INVALID_SESSION_ID,
INVALID_SETUP_OWNER,
INVALID_STATUS,
INVALID_TYPE,
INVALID_TYPE_FOR_OPERATION,
INVALID_TYPE_ON_FIELD_IN_RECORD,
IP_RANGE_LIMIT_EXCEEDED,
LICENSE_LIMIT_EXCEEDED,
LIGHT_PORTAL_USER_EXCEPTION,
LIMIT_EXCEEDED,
MALFORMED_ID,
MANAGER_NOT_DEFINED,
MASSMAIL_RETRY_LIMIT_EXCEEDED,
MASS_MAIL_LIMIT_EXCEEDED,
MAXIMUM_CCEMAILS_EXCEEDED,
MAXIMUM_DASHBOARD_COMPONENTS_EXCEEDED,
MAXIMUM_HIERARCHY_LEVELS_REACHED,
MAXIMUM_SIZE_OF_ATTACHMENT,
MAXIMUM_SIZE_OF_DOCUMENT,
MAX_ACTIONS_PER_RULE_EXCEEDED,
MAX_ACTIVE_RULES_EXCEEDED,
MAX_APPROVAL_STEPS_EXCEEDED,
MAX_FORMULAS_PER_RULE_EXCEEDED,
MAX_RULES_EXCEEDED,
MAX_RULE_ENTRIES_EXCEEDED,
MAX_TASK_DESCRIPTION_EXCEEEDED,
MAX_TM_RULES_EXCEEDED,
MAX_TM_RULE_ITEMS_EXCEEDED,
MERGE_FAILED,
MISSING_ARGUMENT,
MIXED_DML_OPERATION,
NONUNIQUE_SHIPPING_ADDRESS,
NO_APPLICABLE_PROCESS,
NO_ATTACHMENT_PERMISSION,
NO_INACTIVE_DIVISION_MEMBERS,
NO_MASS_MAIL_PERMISSION,
NUMBER_OUTSIDE_VALID_RANGE,
NUM_HISTORY_FIELDS_BY_SOBJECT_EXCEEDED,
OPTED_OUT_OF_MASS_MAIL,
OP_WITH_INVALID_USER_TYPE_EXCEPTION,
PACKAGE_LICENSE_REQUIRED,
PORTAL_NO_ACCESS,
PORTAL_USER_ALREADY_EXISTS_FOR_CONTACT,
PRIVATE_CONTACT_ON_ASSET,
QUERY_TIMEOUT,
RECORD_IN_USE_BY_WORKFLOW,
REQUEST_RUNNING_TOO_LONG,
REQUIRED_FEATURE_MISSING,
REQUIRED_FIELD_MISSING,
SELF_REFERENCE_FROM_TRIGGER,
SHARE_NEEDED_FOR_CHILD_OWNER,
SINGLE_EMAIL_LIMIT_EXCEEDED,
STANDARD_PRICE_NOT_DEFINED,
STORAGE_LIMIT_EXCEEDED,
STRING_TOO_LONG,
TABSET_LIMIT_EXCEEDED,
TEMPLATE_NOT_ACTIVE,
TERRITORY_REALIGN_IN_PROGRESS,
TEXT_DATA_OUTSIDE_SUPPORTED_CHARSET,
TOO_MANY_APEX_REQUESTS,
TOO_MANY_ENUM_VALUE,
TRANSFER_REQUIRES_READ,
UNABLE_TO_LOCK_ROW,
UNAVAILABLE_RECORDTYPE_EXCEPTION,
UNDELETE_FAILED,
UNKNOWN_EXCEPTION,
UNSPECIFIED_EMAIL_ADDRESS,
UNSUPPORTED_APEX_TRIGGER_OPERATON,
UNVERIFIED_SENDER_ADDRESS,
USER_OWNS_PORTAL_ACCOUNT_EXCEPTION,
USER_WITH_APEX_SHARES_EXCEPTION,
WEBLINK_SIZE_LIMIT_EXCEEDED,
WRONG_CONTROLLER_TYPE;
public String value() {
return name();
}
public static StatusCode fromValue(String v) {
return valueOf(v);
}
}
|
StatusCode
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/LambdaFunctionalInterface.java
|
{
"start": 6084,
"end": 10294
}
|
class ____ to meet the following
* conditions as well:
* 3.1: lambda argument of Kind.LAMBDA_EXPRESSION
* 3.2: same as 2.1
* 3.3: same as 2.2
* </pre>
*
* <pre>
* Refactoring Changes for matched methods:
* (1) Add the imports
* (2) Change the method signature to use utility function instead of Function
* (3) Find and change the 'apply' calls to the corresponding applyAsT
* </pre>
*/
@Override
public Description matchMethod(MethodTree tree, VisitorState state) {
MethodSymbol methodSym = ASTHelpers.getSymbol(tree);
// precondition (1)
if (!ASTHelpers.canBeRemoved(methodSym, state)) {
return Description.NO_MATCH;
}
ImmutableList<Tree> params =
tree.getParameters().stream()
.filter(param -> hasFunctionAsArg(param, state))
.filter(
param ->
isFunctionArgSubtypeOf(
param, 0, state.getTypeFromString(JAVA_LANG_NUMBER), state)
|| isFunctionArgSubtypeOf(
param, 1, state.getTypeFromString(JAVA_LANG_NUMBER), state))
.collect(toImmutableList());
// preconditions (2) and (3)
if (params.isEmpty() || !methodCallsMeetConditions(methodSym, state)) {
return Description.NO_MATCH;
}
SuggestedFix.Builder fixBuilder = SuggestedFix.builder();
for (Tree param : params) {
getMappingForFunctionFromTree(param)
.ifPresent(
mappedFunction -> {
fixBuilder.addImport(getImportName(mappedFunction));
fixBuilder.replace(
param,
getFunctionName(mappedFunction) + " " + ASTHelpers.getSymbol(param).name);
refactorInternalApplyMethods(tree, fixBuilder, param, mappedFunction);
});
}
return describeMatch(tree, fixBuilder.build());
}
private void refactorInternalApplyMethods(
MethodTree tree, SuggestedFix.Builder fixBuilder, Tree param, String mappedFunction) {
getMappingForApply(mappedFunction)
.ifPresent(
apply -> {
tree.accept(
new TreeScanner<Void, Void>() {
@Override
public Void visitMethodInvocation(MethodInvocationTree callTree, Void unused) {
if (getSymbol(callTree).name.contentEquals("apply")) {
Symbol receiverSym = getSymbol(getReceiver(callTree));
if (receiverSym != null
&& receiverSym.equals(ASTHelpers.getSymbol(param))) {
fixBuilder.replace(
callTree.getMethodSelect(), receiverSym.name + "." + apply);
}
}
return super.visitMethodInvocation(callTree, null);
}
},
null);
});
}
private boolean methodCallsMeetConditions(Symbol sym, VisitorState state) {
ImmutableMultimap<String, MethodInvocationTree> methodCallMap =
methodCallsForSymbol(sym, getTopLevelClassTree(state));
if (methodCallMap.isEmpty()) {
// no method invocations for this method, safe to refactor
return true;
}
for (MethodInvocationTree methodInvocationTree : methodCallMap.values()) {
if (methodInvocationTree.getArguments().stream()
.filter(a -> a instanceof LambdaExpressionTree)
.filter(a -> hasFunctionAsArg(a, state))
.noneMatch(
a ->
isFunctionArgSubtypeOf(a, 0, state.getTypeFromString(JAVA_LANG_NUMBER), state)
|| isFunctionArgSubtypeOf(
a, 1, state.getTypeFromString(JAVA_LANG_NUMBER), state))) {
return false;
}
}
return true;
}
private static ClassTree getTopLevelClassTree(VisitorState state) {
return findLast(
stream(state.getPath().iterator())
.filter(ClassTree.class::isInstance)
.map(ClassTree.class::cast))
.orElseThrow(() -> new IllegalArgumentException("No enclosing
|
have
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestDelegationTokenAuthenticationHandlerWithMocks.java
|
{
"start": 2740,
"end": 22856
}
|
class ____
extends DelegationTokenAuthenticationHandler {
public MockDelegationTokenAuthenticationHandler() {
super(new AuthenticationHandler() {
@Override
public String getType() {
return "T";
}
@Override
public void init(Properties config) throws ServletException {
}
@Override
public void destroy() {
}
@Override
public boolean managementOperation(AuthenticationToken token,
HttpServletRequest request, HttpServletResponse response)
throws IOException, AuthenticationException {
return false;
}
@Override
public AuthenticationToken authenticate(HttpServletRequest request,
HttpServletResponse response)
throws IOException, AuthenticationException {
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, "mock");
return null;
}
});
}
}
private DelegationTokenAuthenticationHandler handler;
@BeforeEach
public void setUp() throws Exception {
Properties conf = new Properties();
conf.put(KerberosDelegationTokenAuthenticationHandler.TOKEN_KIND, "foo");
handler = new MockDelegationTokenAuthenticationHandler();
handler.initTokenManager(conf);
}
@AfterEach
public void cleanUp() {
handler.destroy();
}
@Test
public void testManagementOperations() throws Exception {
final Text testTokenKind = new Text("foo");
final String testRenewer = "bar";
final String testService = "192.168.64.101:8888";
testNonManagementOperation();
testManagementOperationErrors();
testGetToken(null, null, testTokenKind);
testGetToken(testRenewer, null, testTokenKind);
testCancelToken();
testRenewToken(testRenewer);
// Management operations against token requested with service parameter
Token<DelegationTokenIdentifier> testToken =
testGetToken(testRenewer, testService, testTokenKind);
testRenewToken(testToken, testRenewer);
testCancelToken(testToken);
}
private void testNonManagementOperation() throws Exception {
HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getParameter(
DelegationTokenAuthenticator.OP_PARAM)).thenReturn(null);
assertTrue(handler.managementOperation(null, request, null));
when(request.getParameter(
DelegationTokenAuthenticator.OP_PARAM)).thenReturn("CREATE");
assertTrue(handler.managementOperation(null, request, null));
}
private void testManagementOperationErrors() throws Exception {
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).thenReturn(
DelegationTokenAuthenticator.OP_PARAM + "=" +
DelegationTokenAuthenticator.DelegationTokenOperation.
GETDELEGATIONTOKEN.toString()
);
when(request.getMethod()).thenReturn("FOO");
assertFalse(handler.managementOperation(null, request, response));
verify(response).sendError(
eq(HttpServletResponse.SC_BAD_REQUEST),
startsWith("Wrong HTTP method"));
reset(response);
when(request.getMethod()).thenReturn(
DelegationTokenAuthenticator.DelegationTokenOperation.
GETDELEGATIONTOKEN.getHttpMethod()
);
assertFalse(handler.managementOperation(null, request, response));
verify(response).setStatus(
eq(HttpServletResponse.SC_UNAUTHORIZED));
verify(response).setHeader(
eq(KerberosAuthenticator.WWW_AUTHENTICATE),
eq("mock"));
}
private Token<DelegationTokenIdentifier> testGetToken(String renewer,
String service, Text expectedTokenKind) throws Exception {
DelegationTokenAuthenticator.DelegationTokenOperation op =
DelegationTokenAuthenticator.DelegationTokenOperation.
GETDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString());
when(request.getMethod()).thenReturn(op.getHttpMethod());
AuthenticationToken token = mock(AuthenticationToken.class);
when(token.getUserName()).thenReturn("user");
when(response.getWriter()).thenReturn(new PrintWriter(
new StringWriter()));
assertFalse(handler.managementOperation(token, request, response));
String queryString =
DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString() + "&" +
DelegationTokenAuthenticator.RENEWER_PARAM + "=" + renewer;
if (service != null) {
queryString += "&" + DelegationTokenAuthenticator.SERVICE_PARAM + "="
+ service;
}
when(request.getQueryString()).thenReturn(queryString);
reset(response);
reset(token);
when(token.getUserName()).thenReturn("user");
StringWriter writer = new StringWriter();
PrintWriter pwriter = new PrintWriter(writer);
when(response.getWriter()).thenReturn(pwriter);
assertFalse(handler.managementOperation(token, request, response));
if (renewer == null) {
verify(token).getUserName();
} else {
verify(token).getUserName();
}
verify(response).setStatus(HttpServletResponse.SC_OK);
verify(response).setContentType(MediaType.APPLICATION_JSON);
pwriter.close();
String responseOutput = writer.toString();
String tokenLabel = DelegationTokenAuthenticator.
DELEGATION_TOKEN_JSON;
assertTrue(responseOutput.contains(tokenLabel));
assertTrue(responseOutput.contains(
DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON));
ObjectMapper jsonMapper = new ObjectMapper();
Map json = jsonMapper.readValue(responseOutput, Map.class);
json = (Map) json.get(tokenLabel);
String tokenStr;
tokenStr = (String) json.get(DelegationTokenAuthenticator.
DELEGATION_TOKEN_URL_STRING_JSON);
Token<DelegationTokenIdentifier> dt = new Token<DelegationTokenIdentifier>();
dt.decodeFromUrlString(tokenStr);
handler.getTokenManager().verifyToken(dt);
assertEquals(expectedTokenKind, dt.getKind());
if (service != null) {
assertEquals(service, dt.getService().toString());
} else {
assertEquals(0, dt.getService().getLength());
}
return dt;
}
@SuppressWarnings("unchecked")
private void testCancelToken() throws Exception {
Token<DelegationTokenIdentifier> token =
(Token<DelegationTokenIdentifier>) handler.getTokenManager()
.createToken(UserGroupInformation.getCurrentUser(), "foo");
testCancelToken(token);
}
@SuppressWarnings("unchecked")
private void testCancelToken(Token<DelegationTokenIdentifier> token)
throws Exception {
DelegationTokenAuthenticator.DelegationTokenOperation op =
DelegationTokenAuthenticator.DelegationTokenOperation.
CANCELDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).thenReturn(
DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString());
when(request.getMethod()).
thenReturn(op.getHttpMethod());
assertFalse(handler.managementOperation(null, request, response));
verify(response).sendError(
eq(HttpServletResponse.SC_BAD_REQUEST),
contains("requires the parameter [token]"));
reset(response);
when(request.getQueryString()).thenReturn(
DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString() + "&" +
DelegationTokenAuthenticator.TOKEN_PARAM + "=" +
token.encodeToUrlString()
);
assertFalse(handler.managementOperation(null, request, response));
verify(response).setStatus(HttpServletResponse.SC_OK);
try {
handler.getTokenManager().verifyToken(token);
fail();
} catch (SecretManager.InvalidToken ex) {
//NOP
} catch (Throwable ex) {
fail();
}
}
@SuppressWarnings("unchecked")
private void testRenewToken(String testRenewer) throws Exception {
Token<DelegationTokenIdentifier> dToken = (Token<DelegationTokenIdentifier>)
handler.getTokenManager().createToken(
UserGroupInformation.getCurrentUser(), testRenewer);
testRenewToken(dToken, testRenewer);
}
@SuppressWarnings("unchecked")
private void testRenewToken(Token<DelegationTokenIdentifier> dToken,
String testRenewer) throws Exception {
DelegationTokenAuthenticator.DelegationTokenOperation op =
DelegationTokenAuthenticator.DelegationTokenOperation.
RENEWDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString());
when(request.getMethod()).
thenReturn(op.getHttpMethod());
assertFalse(handler.managementOperation(null, request, response));
verify(response).setStatus(
eq(HttpServletResponse.SC_UNAUTHORIZED));
verify(response).setHeader(eq(
KerberosAuthenticator.WWW_AUTHENTICATE),
eq("mock")
);
reset(response);
AuthenticationToken token = mock(AuthenticationToken.class);
when(token.getUserName()).thenReturn(testRenewer);
assertFalse(handler.managementOperation(token, request, response));
verify(response).sendError(
eq(HttpServletResponse.SC_BAD_REQUEST),
contains("requires the parameter [token]"));
reset(response);
StringWriter writer = new StringWriter();
PrintWriter pwriter = new PrintWriter(writer);
when(response.getWriter()).thenReturn(pwriter);
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString() +
"&" + DelegationTokenAuthenticator.TOKEN_PARAM + "=" +
dToken.encodeToUrlString());
assertFalse(handler.managementOperation(token, request, response));
verify(response).setStatus(HttpServletResponse.SC_OK);
pwriter.close();
assertTrue(writer.toString().contains("long"));
handler.getTokenManager().verifyToken(dToken);
}
@Test
public void testAuthenticate() throws Exception {
testValidDelegationTokenQueryString();
testValidDelegationTokenHeader();
testInvalidDelegationTokenQueryString();
testInvalidDelegationTokenHeader();
}
@SuppressWarnings("unchecked")
private void testValidDelegationTokenQueryString() throws Exception {
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
Token<DelegationTokenIdentifier> dToken =
(Token<DelegationTokenIdentifier>) handler.getTokenManager().createToken(
UserGroupInformation.getCurrentUser(), "user");
when(request.getQueryString()).thenReturn(
DelegationTokenAuthenticator.DELEGATION_PARAM + "=" +
dToken.encodeToUrlString());
AuthenticationToken token = handler.authenticate(request, response);
assertEquals(UserGroupInformation.getCurrentUser().
getShortUserName(), token.getUserName());
assertEquals(0, token.getExpires());
assertEquals(handler.getType(),
token.getType());
assertTrue(token.isExpired());
}
@SuppressWarnings("unchecked")
private void testValidDelegationTokenHeader() throws Exception {
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
Token<DelegationTokenIdentifier> dToken =
(Token<DelegationTokenIdentifier>) handler.getTokenManager().createToken(
UserGroupInformation.getCurrentUser(), "user");
when(request.getHeader(eq(
DelegationTokenAuthenticator.DELEGATION_TOKEN_HEADER))).thenReturn(
dToken.encodeToUrlString());
AuthenticationToken token = handler.authenticate(request, response);
assertEquals(UserGroupInformation.getCurrentUser().
getShortUserName(), token.getUserName());
assertEquals(0, token.getExpires());
assertEquals(handler.getType(),
token.getType());
assertTrue(token.isExpired());
}
private void testInvalidDelegationTokenQueryString() throws Exception {
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).thenReturn(
DelegationTokenAuthenticator.DELEGATION_PARAM + "=invalid");
StringWriter writer = new StringWriter();
when(response.getWriter()).thenReturn(new PrintWriter(writer));
assertNull(handler.authenticate(request, response));
verify(response).setStatus(HttpServletResponse.SC_FORBIDDEN);
assertTrue(writer.toString().contains("AuthenticationException"));
}
private void testInvalidDelegationTokenHeader() throws Exception {
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getHeader(eq(
DelegationTokenAuthenticator.DELEGATION_TOKEN_HEADER))).thenReturn(
"invalid");
StringWriter writer = new StringWriter();
when(response.getWriter()).thenReturn(new PrintWriter(writer));
assertNull(handler.authenticate(request, response));
assertTrue(writer.toString().contains("AuthenticationException"));
}
private String getToken() throws Exception {
DelegationTokenAuthenticator.DelegationTokenOperation op =
DelegationTokenAuthenticator.DelegationTokenOperation.
GETDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString());
when(request.getMethod()).thenReturn(op.getHttpMethod());
AuthenticationToken token = mock(AuthenticationToken.class);
when(token.getUserName()).thenReturn("user");
when(response.getWriter()).thenReturn(new PrintWriter(
new StringWriter()));
assertFalse(handler.managementOperation(token, request, response));
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString() +
"&" + DelegationTokenAuthenticator.RENEWER_PARAM + "=" + null);
reset(response);
reset(token);
when(token.getUserName()).thenReturn("user");
StringWriter writer = new StringWriter();
PrintWriter pwriter = new PrintWriter(writer);
when(response.getWriter()).thenReturn(pwriter);
assertFalse(handler.managementOperation(token, request, response));
verify(token).getUserName();
verify(response).setStatus(HttpServletResponse.SC_OK);
verify(response).setContentType(MediaType.APPLICATION_JSON);
pwriter.close();
String responseOutput = writer.toString();
String tokenLabel = DelegationTokenAuthenticator.
DELEGATION_TOKEN_JSON;
assertTrue(responseOutput.contains(tokenLabel));
assertTrue(responseOutput.contains(
DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON));
ObjectMapper jsonMapper = new ObjectMapper();
Map json = jsonMapper.readValue(responseOutput, Map.class);
json = (Map) json.get(tokenLabel);
String tokenStr;
tokenStr = (String) json.get(DelegationTokenAuthenticator.
DELEGATION_TOKEN_URL_STRING_JSON);
Token<DelegationTokenIdentifier> dt = new Token<DelegationTokenIdentifier>();
dt.decodeFromUrlString(tokenStr);
handler.getTokenManager().verifyToken(dt);
return tokenStr;
}
@Test
public void testCannotGetTokenUsingToken() throws Exception {
DelegationTokenAuthenticator.DelegationTokenOperation op =
DelegationTokenAuthenticator.DelegationTokenOperation.
GETDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getMethod()).thenReturn(op.getHttpMethod());
HttpServletResponse response = mock(HttpServletResponse.class);
when(response.getWriter()).thenReturn(new PrintWriter(
new StringWriter()));
String tokenStr = getToken();
// Try get a new token using the fetched token, should get 401.
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString() +
"&" + DelegationTokenAuthenticator.RENEWER_PARAM + "=" + null +
"&" + DelegationTokenAuthenticator.DELEGATION_PARAM + "=" + tokenStr);
reset(response);
StringWriter writer = new StringWriter();
PrintWriter pwriter = new PrintWriter(writer);
when(response.getWriter()).thenReturn(pwriter);
assertFalse(handler.managementOperation(null, request, response));
verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
@Test
public void testCannotRenewTokenUsingToken() throws Exception {
DelegationTokenAuthenticator.DelegationTokenOperation op =
DelegationTokenAuthenticator.DelegationTokenOperation.
RENEWDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getMethod()).thenReturn(op.getHttpMethod());
HttpServletResponse response = mock(HttpServletResponse.class);
when(response.getWriter()).thenReturn(new PrintWriter(
new StringWriter()));
String tokenStr = getToken();
// Try renew a token using itself, should get 401.
when(request.getQueryString()).
thenReturn(DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString() +
"&" + DelegationTokenAuthenticator.TOKEN_PARAM + "=" + tokenStr +
"&" + DelegationTokenAuthenticator.DELEGATION_PARAM + "=" + tokenStr);
reset(response);
StringWriter writer = new StringWriter();
PrintWriter pwriter = new PrintWriter(writer);
when(response.getWriter()).thenReturn(pwriter);
assertFalse(handler.managementOperation(null, request, response));
verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
@Test
public void testWriterNotClosed() throws Exception {
Properties conf = new Properties();
conf.put(KerberosDelegationTokenAuthenticationHandler.TOKEN_KIND, "foo");
conf.put(DelegationTokenAuthenticationHandler.JSON_MAPPER_PREFIX
+ "AUTO_CLOSE_TARGET", "false");
DelegationTokenAuthenticationHandler noAuthCloseHandler =
new MockDelegationTokenAuthenticationHandler();
try {
noAuthCloseHandler.initTokenManager(conf);
noAuthCloseHandler.initJsonFactory(conf);
DelegationTokenAuthenticator.DelegationTokenOperation op =
GETDELEGATIONTOKEN;
HttpServletRequest request = mock(HttpServletRequest.class);
HttpServletResponse response = mock(HttpServletResponse.class);
when(request.getQueryString()).thenReturn(
DelegationTokenAuthenticator.OP_PARAM + "=" + op.toString());
when(request.getMethod()).thenReturn(op.getHttpMethod());
AuthenticationToken token = mock(AuthenticationToken.class);
when(token.getUserName()).thenReturn("user");
final MutableBoolean closed = new MutableBoolean();
PrintWriter printWriterCloseCount = new PrintWriter(new StringWriter()) {
@Override
public void close() {
closed.setValue(true);
super.close();
}
@Override
public void write(String str) {
if (closed.booleanValue()) {
throw new RuntimeException("already closed!");
}
super.write(str);
}
};
when(response.getWriter()).thenReturn(printWriterCloseCount);
assertFalse(noAuthCloseHandler.managementOperation(token, request,
response));
} finally {
noAuthCloseHandler.destroy();
}
}
}
|
MockDelegationTokenAuthenticationHandler
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/socket/oio/OioSocketChannelConfig.java
|
{
"start": 1545,
"end": 3651
}
|
interface ____ extends SocketChannelConfig {
/**
* Sets the maximal time a operation on the underlying socket may block.
*/
OioSocketChannelConfig setSoTimeout(int timeout);
/**
* Returns the maximal time a operation on the underlying socket may block.
*/
int getSoTimeout();
@Override
OioSocketChannelConfig setTcpNoDelay(boolean tcpNoDelay);
@Override
OioSocketChannelConfig setSoLinger(int soLinger);
@Override
OioSocketChannelConfig setSendBufferSize(int sendBufferSize);
@Override
OioSocketChannelConfig setReceiveBufferSize(int receiveBufferSize);
@Override
OioSocketChannelConfig setKeepAlive(boolean keepAlive);
@Override
OioSocketChannelConfig setTrafficClass(int trafficClass);
@Override
OioSocketChannelConfig setReuseAddress(boolean reuseAddress);
@Override
OioSocketChannelConfig setPerformancePreferences(int connectionTime, int latency, int bandwidth);
@Override
OioSocketChannelConfig setAllowHalfClosure(boolean allowHalfClosure);
@Override
OioSocketChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis);
@Override
@Deprecated
OioSocketChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead);
@Override
OioSocketChannelConfig setWriteSpinCount(int writeSpinCount);
@Override
OioSocketChannelConfig setAllocator(ByteBufAllocator allocator);
@Override
OioSocketChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator);
@Override
OioSocketChannelConfig setAutoRead(boolean autoRead);
@Override
OioSocketChannelConfig setAutoClose(boolean autoClose);
@Override
OioSocketChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark);
@Override
OioSocketChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark);
@Override
OioSocketChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark);
@Override
OioSocketChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator);
}
|
OioSocketChannelConfig
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java
|
{
"start": 3406,
"end": 9853
}
|
class ____ extends AbstractWatcherIntegrationTestCase {
private MockWebServer webServer = new MockWebServer();
private MockResponse mockResponse = new MockResponse().setResponseCode(200)
.addHeader("Content-Type", "application/foo")
.setBody("This is the content");
private EmailServer server;
@Override
public void setUp() throws Exception {
super.setUp();
webServer.enqueue(mockResponse);
webServer.start();
server = EmailServer.localhost(logger);
}
@After
public void cleanup() throws Exception {
server.stop();
webServer.close();
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.notification.email.account.test.smtp.secure_password", EmailServer.PASSWORD);
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put("xpack.notification.email.account.test.smtp.auth", true)
.put("xpack.notification.email.account.test.smtp.user", EmailServer.USERNAME)
.put("xpack.notification.email.account.test.smtp.port", server.port())
.put("xpack.notification.email.account.test.smtp.host", "localhost")
.setSecureSettings(secureSettings)
.build();
}
public List<String> getAttachments(MimeMessage message) throws Exception {
Object content = message.getContent();
if (content instanceof String) return null;
if (content instanceof Multipart multipart) {
List<String> result = new ArrayList<>();
for (int i = 0; i < multipart.getCount(); i++) {
result.addAll(getAttachments(multipart.getBodyPart(i)));
}
return result;
}
return null;
}
private List<String> getAttachments(BodyPart part) throws Exception {
List<String> result = new ArrayList<>();
Object content = part.getContent();
if (content instanceof InputStream || content instanceof String) {
if (Part.ATTACHMENT.equalsIgnoreCase(part.getDisposition()) || Strings.hasLength(part.getFileName())) {
result.add(Streams.copyToString(new InputStreamReader(part.getInputStream(), StandardCharsets.UTF_8)));
return result;
} else {
return new ArrayList<>();
}
}
if (content instanceof Multipart multipart) {
for (int i = 0; i < multipart.getCount(); i++) {
BodyPart bodyPart = multipart.getBodyPart(i);
result.addAll(getAttachments(bodyPart));
}
}
return result;
}
public void testThatEmailAttachmentsAreSent() throws Exception {
org.elasticsearch.xpack.watcher.notification.email.DataAttachment dataFormat = randomFrom(JSON, YAML);
final CountDownLatch latch = new CountDownLatch(1);
server.addListener(message -> {
assertThat(message.getSubject(), equalTo("Subject"));
List<String> attachments = getAttachments(message);
if (dataFormat == YAML) {
assertThat(attachments, hasItem(allOf(startsWith("---"), containsString("_test_id"))));
} else {
assertThat(attachments, hasItem(allOf(startsWith("{"), containsString("_test_id"))));
}
assertThat(attachments, hasItem(containsString("This is the content")));
latch.countDown();
});
createIndex("idx");
// Have a sample document in the index, the watch is going to evaluate
prepareIndex("idx").setSource("field", "value").get();
refresh();
List<EmailAttachmentParser.EmailAttachment> attachments = new ArrayList<>();
DataAttachment dataAttachment = DataAttachment.builder("my-id").dataAttachment(dataFormat).build();
attachments.add(dataAttachment);
HttpRequestTemplate requestTemplate = HttpRequestTemplate.builder("localhost", webServer.getPort())
.path("/")
.scheme(Scheme.HTTP)
.build();
HttpRequestAttachment httpRequestAttachment = HttpRequestAttachment.builder("other-id")
.httpRequestTemplate(requestTemplate)
.build();
attachments.add(httpRequestAttachment);
EmailAttachments emailAttachments = new EmailAttachments(attachments);
XContentBuilder tmpBuilder = jsonBuilder();
tmpBuilder.startObject();
emailAttachments.toXContent(tmpBuilder, ToXContent.EMPTY_PARAMS);
tmpBuilder.endObject();
EmailTemplate.Builder emailBuilder = EmailTemplate.builder().from("from@example.org").to("to@example.org").subject("Subject");
WatchSourceBuilder watchSourceBuilder = watchBuilder().trigger(schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS)))
.input(noneInput())
.condition(InternalAlwaysCondition.INSTANCE)
.addAction(
"_email",
emailAction(emailBuilder).setAuthentication(EmailServer.USERNAME, EmailServer.PASSWORD.toCharArray())
.setAttachments(emailAttachments)
);
new PutWatchRequestBuilder(client()).setId("_test_id").setSource(watchSourceBuilder).get();
timeWarp().trigger("_test_id");
refresh();
assertBusy(() -> {
SearchResponse searchResponse;
try {
searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setQuery(
QueryBuilders.termQuery("watch_id", "_test_id")
).get();
} catch (SearchPhaseExecutionException e) {
if (e.getCause() instanceof NoShardAvailableActionException) {
// Nothing has created the index yet
searchResponse = null;
} else {
throw e;
}
}
assertNotNull(searchResponse);
try {
assertHitCount(searchResponse, 1);
} finally {
searchResponse.decRef();
}
});
if (latch.await(5, TimeUnit.SECONDS) == false) {
fail("waited too long for email to be received");
}
}
}
|
EmailAttachmentTests
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2DeviceAuthorizationConsentAuthenticationProvider.java
|
{
"start": 2920,
"end": 12474
}
|
class ____ implements AuthenticationProvider {
private static final String ERROR_URI = "https://datatracker.ietf.org/doc/html/rfc6749#section-5.2";
static final OAuth2TokenType STATE_TOKEN_TYPE = new OAuth2TokenType(OAuth2ParameterNames.STATE);
private final Log logger = LogFactory.getLog(getClass());
private final RegisteredClientRepository registeredClientRepository;
private final OAuth2AuthorizationService authorizationService;
private final OAuth2AuthorizationConsentService authorizationConsentService;
private Consumer<OAuth2AuthorizationConsentAuthenticationContext> authorizationConsentCustomizer;
/**
* Constructs an {@code OAuth2DeviceAuthorizationConsentAuthenticationProvider} using
* the provided parameters.
* @param registeredClientRepository the repository of registered clients
* @param authorizationService the authorization service
* @param authorizationConsentService the authorization consent service
*/
public OAuth2DeviceAuthorizationConsentAuthenticationProvider(RegisteredClientRepository registeredClientRepository,
OAuth2AuthorizationService authorizationService,
OAuth2AuthorizationConsentService authorizationConsentService) {
Assert.notNull(registeredClientRepository, "registeredClientRepository cannot be null");
Assert.notNull(authorizationService, "authorizationService cannot be null");
Assert.notNull(authorizationConsentService, "authorizationConsentService cannot be null");
this.registeredClientRepository = registeredClientRepository;
this.authorizationService = authorizationService;
this.authorizationConsentService = authorizationConsentService;
}
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
OAuth2DeviceAuthorizationConsentAuthenticationToken deviceAuthorizationConsentAuthentication = (OAuth2DeviceAuthorizationConsentAuthenticationToken) authentication;
OAuth2Authorization authorization = this.authorizationService
.findByToken(deviceAuthorizationConsentAuthentication.getState(), STATE_TOKEN_TYPE);
if (authorization == null) {
throwError(OAuth2ErrorCodes.INVALID_REQUEST, OAuth2ParameterNames.STATE);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved authorization with device authorization consent state");
}
// The authorization must be associated to the current principal
Authentication principal = (Authentication) deviceAuthorizationConsentAuthentication.getPrincipal();
if (!isPrincipalAuthenticated(principal) || !principal.getName().equals(authorization.getPrincipalName())) {
throwError(OAuth2ErrorCodes.INVALID_REQUEST, OAuth2ParameterNames.STATE);
}
RegisteredClient registeredClient = this.registeredClientRepository
.findByClientId(deviceAuthorizationConsentAuthentication.getClientId());
if (registeredClient == null || !registeredClient.getId().equals(authorization.getRegisteredClientId())) {
throwError(OAuth2ErrorCodes.INVALID_REQUEST, OAuth2ParameterNames.CLIENT_ID);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved registered client");
}
Set<String> requestedScopes = authorization.getAttribute(OAuth2ParameterNames.SCOPE);
Set<String> authorizedScopes = new HashSet<>(deviceAuthorizationConsentAuthentication.getScopes());
if (!requestedScopes.containsAll(authorizedScopes)) {
throwError(OAuth2ErrorCodes.INVALID_SCOPE, OAuth2ParameterNames.SCOPE);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Validated device authorization consent request parameters");
}
OAuth2AuthorizationConsent currentAuthorizationConsent = this.authorizationConsentService
.findById(authorization.getRegisteredClientId(), principal.getName());
Set<String> currentAuthorizedScopes = (currentAuthorizationConsent != null)
? currentAuthorizationConsent.getScopes() : Collections.emptySet();
if (!currentAuthorizedScopes.isEmpty()) {
for (String requestedScope : requestedScopes) {
if (currentAuthorizedScopes.contains(requestedScope)) {
authorizedScopes.add(requestedScope);
}
}
}
OAuth2AuthorizationConsent.Builder authorizationConsentBuilder;
if (currentAuthorizationConsent != null) {
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved existing authorization consent");
}
authorizationConsentBuilder = OAuth2AuthorizationConsent.from(currentAuthorizationConsent);
}
else {
authorizationConsentBuilder = OAuth2AuthorizationConsent.withId(authorization.getRegisteredClientId(),
principal.getName());
}
authorizedScopes.forEach(authorizationConsentBuilder::scope);
if (this.authorizationConsentCustomizer != null) {
// @formatter:off
OAuth2AuthorizationConsentAuthenticationContext authorizationConsentAuthenticationContext =
OAuth2AuthorizationConsentAuthenticationContext.with(deviceAuthorizationConsentAuthentication)
.authorizationConsent(authorizationConsentBuilder)
.registeredClient(registeredClient)
.authorization(authorization)
.build();
// @formatter:on
this.authorizationConsentCustomizer.accept(authorizationConsentAuthenticationContext);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Customized authorization consent");
}
}
Set<GrantedAuthority> authorities = new HashSet<>();
authorizationConsentBuilder.authorities(authorities::addAll);
OAuth2Authorization.Token<OAuth2DeviceCode> deviceCodeToken = authorization.getToken(OAuth2DeviceCode.class);
OAuth2Authorization.Token<OAuth2UserCode> userCodeToken = authorization.getToken(OAuth2UserCode.class);
if (authorities.isEmpty()) {
// Authorization consent denied (or revoked)
if (currentAuthorizationConsent != null) {
this.authorizationConsentService.remove(currentAuthorizationConsent);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Revoked authorization consent");
}
}
authorization = OAuth2Authorization.from(authorization)
.invalidate(deviceCodeToken.getToken())
.invalidate(userCodeToken.getToken())
.attributes((attrs) -> attrs.remove(OAuth2ParameterNames.STATE))
.build();
this.authorizationService.save(authorization);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Invalidated device code and user code because authorization consent was denied");
}
throwError(OAuth2ErrorCodes.ACCESS_DENIED, OAuth2ParameterNames.CLIENT_ID);
}
OAuth2AuthorizationConsent authorizationConsent = authorizationConsentBuilder.build();
if (!authorizationConsent.equals(currentAuthorizationConsent)) {
this.authorizationConsentService.save(authorizationConsent);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Saved authorization consent");
}
}
authorization = OAuth2Authorization.from(authorization)
.authorizedScopes(authorizedScopes)
.invalidate(userCodeToken.getToken())
.attributes((attrs) -> attrs.remove(OAuth2ParameterNames.STATE))
.attributes((attrs) -> attrs.remove(OAuth2ParameterNames.SCOPE))
.build();
this.authorizationService.save(authorization);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Saved authorization with authorized scopes");
// This log is kept separate for consistency with other providers
this.logger.trace("Authenticated device authorization consent request");
}
return new OAuth2DeviceVerificationAuthenticationToken(principal,
deviceAuthorizationConsentAuthentication.getUserCode(), registeredClient.getClientId());
}
@Override
public boolean supports(Class<?> authentication) {
return OAuth2DeviceAuthorizationConsentAuthenticationToken.class.isAssignableFrom(authentication);
}
/**
* Sets the {@code Consumer} providing access to the
* {@link OAuth2AuthorizationConsentAuthenticationContext} containing an
* {@link OAuth2AuthorizationConsent.Builder} and additional context information.
*
* <p>
* The following context attributes are available:
* <ul>
* <li>The {@link OAuth2AuthorizationConsent.Builder} used to build the authorization
* consent prior to
* {@link OAuth2AuthorizationConsentService#save(OAuth2AuthorizationConsent)}.</li>
* <li>The {@link Authentication} of type
* {@link OAuth2DeviceAuthorizationConsentAuthenticationToken}.</li>
* <li>The {@link RegisteredClient} associated with the device authorization
* request.</li>
* <li>The {@link OAuth2Authorization} associated with the state token presented in
* the device authorization consent request.</li>
* </ul>
* @param authorizationConsentCustomizer the {@code Consumer} providing access to the
* {@link OAuth2AuthorizationConsentAuthenticationContext} containing an
* {@link OAuth2AuthorizationConsent.Builder}
*/
public void setAuthorizationConsentCustomizer(
Consumer<OAuth2AuthorizationConsentAuthenticationContext> authorizationConsentCustomizer) {
Assert.notNull(authorizationConsentCustomizer, "authorizationConsentCustomizer cannot be null");
this.authorizationConsentCustomizer = authorizationConsentCustomizer;
}
private static boolean isPrincipalAuthenticated(Authentication principal) {
return principal != null && !AnonymousAuthenticationToken.class.isAssignableFrom(principal.getClass())
&& principal.isAuthenticated();
}
private static void throwError(String errorCode, String parameterName) {
OAuth2Error error = new OAuth2Error(errorCode, "OAuth 2.0 Parameter: " + parameterName, ERROR_URI);
throw new OAuth2AuthenticationException(error);
}
}
|
OAuth2DeviceAuthorizationConsentAuthenticationProvider
|
java
|
redisson__redisson
|
redisson-spring-data/redisson-spring-data-21/src/main/java/org/redisson/spring/data/connection/RedissonReactiveSubscription.java
|
{
"start": 1577,
"end": 1665
}
|
class ____ implements ReactiveSubscription {
public static
|
RedissonReactiveSubscription
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/ai/model/mcp/registry/SseTransport.java
|
{
"start": 1135,
"end": 1687
}
|
class ____ {
private String type = "sse";
private String url;
private List<KeyValueInput> headers;
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public List<KeyValueInput> getHeaders() {
return headers;
}
public void setHeaders(List<KeyValueInput> headers) {
this.headers = headers;
}
}
|
SseTransport
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BindableRuntimeHintsRegistrarTests.java
|
{
"start": 16586,
"end": 16874
}
|
class ____ {
@NestedConfigurationProperty
private @Nullable Recursive recursive;
public @Nullable Recursive getRecursive() {
return this.recursive;
}
public void setRecursive(@Nullable Recursive recursive) {
this.recursive = recursive;
}
}
public static
|
WithRecursive
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEvent.java
|
{
"start": 957,
"end": 1553
}
|
class ____ extends AbstractEvent<EventType>{
private final JobId jobID;
private final HistoryEvent historyEvent;
public JobHistoryEvent(JobId jobID, HistoryEvent historyEvent) {
this(jobID, historyEvent, System.currentTimeMillis());
}
public JobHistoryEvent(JobId jobID, HistoryEvent historyEvent,
long timestamp) {
super(historyEvent.getEventType(), timestamp);
this.jobID = jobID;
this.historyEvent = historyEvent;
}
public JobId getJobID() {
return jobID;
}
public HistoryEvent getHistoryEvent() {
return historyEvent;
}
}
|
JobHistoryEvent
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1966/Issue1966Test.java
|
{
"start": 490,
"end": 911
}
|
class ____ {
@ProcessorTest
public void shouldSelectDefaultExpressionEvenWhenSourceInMappingIsNotSpecified() {
Issue1966Mapper.AnimalRecord dto = new Issue1966Mapper.AnimalRecord();
Issue1966Mapper.Animal entity = Issue1966Mapper.INSTANCE.toAnimal( dto );
assertThat( entity.getPreviousNames() ).isNotNull();
assertThat( entity.getPreviousNames() ).isEmpty();
}
}
|
Issue1966Test
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/model/RouteConfigurationOnExceptionTest.java
|
{
"start": 1083,
"end": 5019
}
|
class ____ extends ContextTestSupport {
@Override
protected RouteBuilder[] createRouteBuilders() {
return new RouteBuilder[] {
new RouteBuilder() {
@Override
public void configure() {
routeTemplate("route-template")
.from("direct:start-template")
.routeConfigurationId("my-error-handler")
.throwException(RuntimeException.class, "Expected Error");
}
},
new RouteBuilder() {
@Override
public void configure() {
routeTemplate("route-template-parameter")
.templateParameter("configuration-id")
.templateParameter("route-id")
.from("direct:start-template-parameter")
.routeId("{{route-id}}")
.routeConfigurationId("{{configuration-id}}")
.throwException(RuntimeException.class, "Expected Error");
}
},
new RouteBuilder() {
@Override
public void configure() {
TemplatedRouteBuilder.builder(context, "route-template")
.routeId("my-test-file-route")
.add();
}
},
new RouteBuilder() {
@Override
public void configure() {
TemplatedRouteBuilder.builder(context, "route-template-parameter")
.routeId("my-test-file-route-parameter")
.parameter("configuration-id", "my-error-handler")
.parameter("route-id", "custom-route-id")
.add();
}
},
new RouteBuilder() {
@Override
public void configure() {
from("direct:start-normal")
.routeConfigurationId("my-error-handler")
.throwException(RuntimeException.class, "Expected Error");
}
},
new RouteConfigurationBuilder() {
@Override
public void configuration() {
routeConfiguration("my-error-handler").onException(Exception.class).handled(true)
.transform(constant("Error Received"))
.to("mock:result");
}
}
};
}
@Test
void testRouteTemplateCanSupportRouteConfiguration() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedBodiesReceived("Error Received");
template.sendBody("direct:start-template", "foo");
assertMockEndpointsSatisfied();
}
@Test
void testRouteTemplateCanSupportRouteConfigurationWithParameter() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedBodiesReceived("Error Received");
template.sendBody("direct:start-template-parameter", "foo");
assertMockEndpointsSatisfied();
}
@Test
void testNormalRouteCanSupportRouteConfiguration() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedBodiesReceived("Error Received");
template.sendBody("direct:start-normal", "foo");
assertMockEndpointsSatisfied();
}
}
|
RouteConfigurationOnExceptionTest
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/sync/ForStSyncKeyedStateBackend.java
|
{
"start": 5489,
"end": 5726
}
|
class ____ the rules for closing/releasing native RocksDB resources as described in +
* <a
* href="https://github.com/facebook/rocksdb/wiki/RocksJava-Basics#opening-a-database-with-column-families">
* this document</a>.
*/
public
|
follows
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/MainClassFinder.java
|
{
"start": 2607,
"end": 2712
}
|
class ____ a given directory.
* @param rootDirectory the root directory to search
* @return the main
|
from
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/record/SimpleRecord.java
|
{
"start": 1166,
"end": 3898
}
|
class ____ {
private final ByteBuffer key;
private final ByteBuffer value;
private final long timestamp;
private final Header[] headers;
public SimpleRecord(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers) {
Objects.requireNonNull(headers, "Headers must be non-null");
this.key = key;
this.value = value;
this.timestamp = timestamp;
this.headers = headers;
}
public SimpleRecord(long timestamp, byte[] key, byte[] value, Header[] headers) {
this(timestamp, Utils.wrapNullable(key), Utils.wrapNullable(value), headers);
}
public SimpleRecord(long timestamp, ByteBuffer key, ByteBuffer value) {
this(timestamp, key, value, Record.EMPTY_HEADERS);
}
public SimpleRecord(long timestamp, byte[] key, byte[] value) {
this(timestamp, Utils.wrapNullable(key), Utils.wrapNullable(value));
}
public SimpleRecord(long timestamp, byte[] value) {
this(timestamp, null, value);
}
public SimpleRecord(byte[] value) {
this(RecordBatch.NO_TIMESTAMP, null, value);
}
public SimpleRecord(ByteBuffer value) {
this(RecordBatch.NO_TIMESTAMP, null, value);
}
public SimpleRecord(byte[] key, byte[] value) {
this(RecordBatch.NO_TIMESTAMP, key, value);
}
public SimpleRecord(Record record) {
this(record.timestamp(), record.key(), record.value(), record.headers());
}
public ByteBuffer key() {
return key;
}
public ByteBuffer value() {
return value;
}
public long timestamp() {
return timestamp;
}
public Header[] headers() {
return headers;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
SimpleRecord that = (SimpleRecord) o;
return timestamp == that.timestamp &&
Objects.equals(key, that.key) &&
Objects.equals(value, that.value) &&
Arrays.equals(headers, that.headers);
}
@Override
public int hashCode() {
int result = key != null ? key.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
result = 31 * result + Long.hashCode(timestamp);
result = 31 * result + Arrays.hashCode(headers);
return result;
}
@Override
public String toString() {
return String.format("SimpleRecord(timestamp=%d, key=%d bytes, value=%d bytes)",
timestamp(),
key == null ? 0 : key.limit(),
value == null ? 0 : value.limit());
}
}
|
SimpleRecord
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/jackson/BadCredentialsExceptionMixin.java
|
{
"start": 1259,
"end": 1568
}
|
class ____ {
/**
* Constructor used by Jackson to create
* {@link org.springframework.security.authentication.BadCredentialsException} object.
* @param message the detail message
*/
@JsonCreator
BadCredentialsExceptionMixin(@JsonProperty("message") String message) {
}
}
|
BadCredentialsExceptionMixin
|
java
|
netty__netty
|
codec-xml/src/main/java/io/netty/handler/codec/xml/XmlEntityReference.java
|
{
"start": 732,
"end": 1899
}
|
class ____ {
private final String name;
private final String text;
public XmlEntityReference(String name, String text) {
this.name = name;
this.text = text;
}
public String name() {
return name;
}
public String text() {
return text;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
XmlEntityReference that = (XmlEntityReference) o;
if (name != null ? !name.equals(that.name) : that.name != null) {
return false;
}
return text != null ? text.equals(that.text) : that.text == null;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (text != null ? text.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "XmlEntityReference{" +
"name='" + name + '\'' +
", text='" + text + '\'' +
'}';
}
}
|
XmlEntityReference
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schematools/PrimaryKeyColumnOrderTest.java
|
{
"start": 6754,
"end": 6834
}
|
class ____ implements Serializable {
private int A;
private int B;
}
}
|
EntityId
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregatorFromFilters.java
|
{
"start": 2738,
"end": 11384
}
|
class ____ extends AdaptingAggregator {
static StringTermsAggregatorFromFilters adaptIntoFiltersOrNull(
String name,
AggregatorFactories factories,
AggregationContext context,
Aggregator parent,
boolean showTermDocCountError,
CardinalityUpperBound cardinality,
Map<String, Object> metadata,
ValuesSourceConfig valuesSourceConfig,
BucketOrder order,
BucketCountThresholds bucketCountThresholds,
LongPredicate acceptedOrds,
CheckedSupplier<SortedSetDocValues, IOException> valuesSupplier
) throws IOException {
if (false == valuesSourceConfig.alignsWithSearchIndex()) {
return null;
}
FilterByFilterAggregator.AdapterBuilder<StringTermsAggregatorFromFilters> filterByFilterBuilder =
new FilterByFilterAggregator.AdapterBuilder<>(name, false, false, null, context, parent, cardinality, metadata) {
@Override
protected StringTermsAggregatorFromFilters adapt(
CheckedFunction<AggregatorFactories, FilterByFilterAggregator, IOException> delegate
) throws IOException {
return new StringTermsAggregatorFromFilters(
parent,
factories,
delegate,
showTermDocCountError,
valuesSourceConfig.format(),
order,
bucketCountThresholds,
valuesSupplier
);
}
};
SortedSetDocValues values = valuesSupplier.get();
TermsEnum terms = values.termsEnum();
String field = valuesSourceConfig.fieldContext().field();
for (long ord = 0; ord < values.getValueCount(); ord++) {
if (acceptedOrds.test(ord) == false) {
continue;
}
terms.seekExact(ord);
/*
* It *feels* like there should be a query that operates
* directly on the global ordinals but there isn't. Building
* one would be tricky because there isn't mapping from
* every global ordinal to its segment ordinal - only from
* the segment ordinal to the global ordinal. You could
* search the mapping to get it but, like I said, tricky.
*/
TermQueryBuilder builder = new TermQueryBuilder(field, valuesSourceConfig.format().format(terms.term()));
filterByFilterBuilder.add(Long.toString(ord), context.buildQuery(builder));
}
return filterByFilterBuilder.build();
}
private final boolean showTermDocCountError;
private final DocValueFormat format;
private final BucketOrder order;
private final BucketCountThresholds bucketCountThresholds;
private final CheckedSupplier<SortedSetDocValues, IOException> valuesSupplier;
public StringTermsAggregatorFromFilters(
Aggregator parent,
AggregatorFactories subAggregators,
CheckedFunction<AggregatorFactories, FilterByFilterAggregator, IOException> delegate,
boolean showTermDocCountError,
DocValueFormat format,
BucketOrder order,
BucketCountThresholds bucketCountThresholds,
CheckedSupplier<SortedSetDocValues, IOException> valuesSupplier
) throws IOException {
super(parent, subAggregators, delegate);
this.showTermDocCountError = showTermDocCountError;
this.format = format;
this.order = order;
this.bucketCountThresholds = bucketCountThresholds;
this.valuesSupplier = valuesSupplier;
}
@Override
protected InternalAggregation adapt(InternalAggregation delegateResult) throws IOException {
InternalFilters filters = (InternalFilters) delegateResult;
List<StringTerms.Bucket> buckets;
long otherDocsCount = 0;
BucketOrder reduceOrder = isKeyOrder(order) ? order : InternalOrder.key(true);
/*
* We default to a shardMinDocCount of 0 which means we'd keep all
* hits, even those that don't have live documents or those that
* don't match any documents in the top level query. This is correct
* if the minDocCount is also 0, but if it is larger than 0 then we
* don't need to send those buckets back to the coordinating node.
* GlobalOrdinalsStringTermsAggregator doesn't collect those
* buckets either. It's a good thing, too, because if you take them
* into account when you sort by, say, key, you might throw away
* buckets with actual docs in them.
*/
long minDocCount = bucketCountThresholds.getShardMinDocCount();
if (minDocCount == 0 && bucketCountThresholds.getMinDocCount() > 0) {
minDocCount = 1;
}
TermsEnum terms = valuesSupplier.get().termsEnum();
if (filters.getBuckets().size() > bucketCountThresholds.getShardSize()) {
PriorityQueue<OrdBucket> queue = new PriorityQueue<>(bucketCountThresholds.getShardSize()) {
private final Comparator<Bucket> comparator = order.comparator();
@Override
protected boolean lessThan(OrdBucket a, OrdBucket b) {
return comparator.compare(a, b) > 0;
}
};
OrdBucket spare = null;
for (InternalFilters.InternalBucket b : filters.getBuckets()) {
if (b.getDocCount() < minDocCount) {
continue;
}
if (spare == null) {
spare = new OrdBucket(showTermDocCountError, format);
} else {
otherDocsCount += spare.docCount;
}
spare.globalOrd = Long.parseLong(b.getKey());
spare.docCount = b.getDocCount();
spare.aggregations = b.getAggregations();
spare = queue.insertWithOverflow(spare);
}
if (spare != null) {
otherDocsCount += spare.docCount;
}
buckets = new ArrayList<>(queue.size());
if (isKeyOrder(order) == false) {
for (OrdBucket b : queue) {
buckets.add(buildBucket(b, terms));
}
buckets.sort(reduceOrder.comparator());
} else {
/*
* Note for the curious: you can just use a for loop to iterate
* the PriorityQueue to get all of the buckets. But they don't
* come off in order. This gets them in order. It's also O(n*log(n))
* instead of O(n), but such is life. And n shouldn't be too big.
*/
while (queue.size() > 0) {
buckets.add(buildBucket(queue.pop(), terms));
}
// The buckets come off last to first so we need to flip them.
Collections.reverse(buckets);
}
} else {
buckets = new ArrayList<>(filters.getBuckets().size());
for (InternalFilters.InternalBucket b : filters.getBuckets()) {
if (b.getDocCount() < minDocCount) {
continue;
}
buckets.add(buildBucket(b, terms));
}
buckets.sort(reduceOrder.comparator());
}
return new StringTerms(
filters.getName(),
reduceOrder,
order,
bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getMinDocCount(),
filters.getMetadata(),
format,
bucketCountThresholds.getShardSize(),
showTermDocCountError,
otherDocsCount,
buckets,
null
);
}
private StringTerms.Bucket buildBucket(OrdBucket b, TermsEnum terms) throws IOException {
terms.seekExact(b.globalOrd);
return new StringTerms.Bucket(BytesRef.deepCopyOf(terms.term()), b.getDocCount(), b.aggregations, showTermDocCountError, 0, format);
}
private StringTerms.Bucket buildBucket(InternalFilters.InternalBucket b, TermsEnum terms) throws IOException {
terms.seekExact(Long.parseLong(b.getKey()));
return new StringTerms.Bucket(
BytesRef.deepCopyOf(terms.term()),
b.getDocCount(),
b.getAggregations(),
showTermDocCountError,
0,
format
);
}
}
|
StringTermsAggregatorFromFilters
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/cache/UnusedContextsIntegrationTests.java
|
{
"start": 8421,
"end": 8476
}
|
class ____ extends AbstractTestCase {
}
static
|
TestCase1
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/MethodCallRefOrBeanPrefixPredicateTest.java
|
{
"start": 1016,
"end": 1966
}
|
class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("foo", new MyFooBean());
return jndi;
}
@Test
public void testRefOrBeanPrefix() throws Exception {
getMockEndpoint("mock:a").expectedBodiesReceived("A");
getMockEndpoint("mock:b").expectedBodiesReceived("B");
template.sendBody("direct:a", "A");
template.sendBody("direct:b", "B");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:a").filter().method("ref:foo").to("mock:a");
from("direct:b").filter().method("bean:foo").to("mock:b");
}
};
}
}
|
MethodCallRefOrBeanPrefixPredicateTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java
|
{
"start": 1181,
"end": 4457
}
|
class ____ {
public static final String PATH = "path";
}
private final String name;
private final String targetPath;
public FieldAliasMapper(String simpleName, String name, String targetPath) {
super(simpleName);
this.name = Mapper.internFieldName(name);
this.targetPath = targetPath;
}
@Override
public String fullPath() {
return name;
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
public String targetPath() {
return targetPath;
}
@Override
public Mapper merge(Mapper mergeWith, MapperMergeContext mapperMergeContext) {
if ((mergeWith instanceof FieldAliasMapper) == false) {
throw new IllegalArgumentException(
"Cannot merge a field alias mapping [" + fullPath() + "] with a mapping that is not for a field alias."
);
}
return mergeWith;
}
@Override
public Iterator<Mapper> iterator() {
return Collections.emptyIterator();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject(leafName()).field("type", CONTENT_TYPE).field(Names.PATH, targetPath).endObject();
}
@Override
public void validate(MappingLookup mappers) {
if (Objects.equals(this.targetPath(), this.fullPath())) {
throw new MapperParsingException(
"Invalid [path] value [" + targetPath + "] for field alias [" + fullPath() + "]: an alias cannot refer to itself."
);
}
if (mappers.fieldTypesLookup().get(targetPath) == null) {
throw new MapperParsingException(
"Invalid [path] value ["
+ targetPath
+ "] for field alias ["
+ fullPath()
+ "]: an alias must refer to an existing field in the mappings."
);
}
if (mappers.getMapper(targetPath) instanceof FieldAliasMapper) {
throw new MapperParsingException(
"Invalid [path] value [" + targetPath + "] for field alias [" + fullPath() + "]: an alias cannot refer to another alias."
);
}
String aliasScope = mappers.nestedLookup().getNestedParent(name);
String pathScope = mappers.nestedLookup().getNestedParent(targetPath);
if (Objects.equals(aliasScope, pathScope) == false) {
StringBuilder message = new StringBuilder(
"Invalid [path] value ["
+ targetPath
+ "] for field alias ["
+ name
+ "]: an alias must have the same nested scope as its target. "
);
message.append(aliasScope == null ? "The alias is not nested" : "The alias's nested scope is [" + aliasScope + "]");
message.append(", but ");
message.append(pathScope == null ? "the target is not nested." : "the target's nested scope is [" + pathScope + "].");
throw new IllegalArgumentException(message.toString());
}
}
@Override
public int getTotalFieldsCount() {
return 1;
}
public static
|
Names
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/model/stream/QMediaStoreUriLoader.java
|
{
"start": 8393,
"end": 8600
}
|
class ____ extends Factory<ParcelFileDescriptor> {
public FileDescriptorFactory(Context context) {
super(context, ParcelFileDescriptor.class);
}
}
private abstract static
|
FileDescriptorFactory
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/collection/immutabletarget/CupboardMapper.java
|
{
"start": 484,
"end": 655
}
|
interface ____ {
CupboardMapper INSTANCE = Mappers.getMapper( CupboardMapper.class );
void map( CupboardDto in, @MappingTarget CupboardEntity out );
}
|
CupboardMapper
|
java
|
quarkusio__quarkus
|
tcks/microprofile-opentelemetry/src/test/java/io/quarkus/tck/opentelemetry/ArquillianLifecycle.java
|
{
"start": 224,
"end": 473
}
|
class ____ {
public void afterDeploy(@Observes AfterDeploy event, TestClass testClass) {
// The TCK expects the url to end with a slash
System.setProperty("test.url", System.getProperty("test.url") + "/");
}
}
|
ArquillianLifecycle
|
java
|
apache__spark
|
core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
|
{
"start": 3382,
"end": 20779
}
|
class ____ extends MemoryConsumer implements ShuffleChecksumSupport {
private static final SparkLogger logger =
SparkLoggerFactory.getLogger(ShuffleExternalSorter.class);
@VisibleForTesting
static final int DISK_WRITE_BUFFER_SIZE = 1024 * 1024;
private final int numPartitions;
private final TaskMemoryManager taskMemoryManager;
private final BlockManager blockManager;
private final TaskContext taskContext;
private final ShuffleWriteMetricsReporter writeMetrics;
/**
* Force this sorter to spill when there are this many elements in memory.
*/
private final int numElementsForSpillThreshold;
/**
* Force this sorter to spill when the in memory size in bytes is beyond this threshold.
*/
private final long sizeInBytesForSpillThreshold;
/** The buffer size to use when writing spills using DiskBlockObjectWriter */
private final int fileBufferSizeBytes;
/** The buffer size to use when writing the sorted records to an on-disk file */
private final int diskWriteBufferSize;
/**
* Memory pages that hold the records being sorted. The pages in this list are freed when
* spilling, although in principle we could recycle these pages across spills (on the other hand,
* this might not be necessary if we maintained a pool of re-usable pages in the TaskMemoryManager
* itself).
*/
private final LinkedList<MemoryBlock> allocatedPages = new LinkedList<>();
private final LinkedList<SpillInfo> spills = new LinkedList<>();
/** Peak memory used by this sorter so far, in bytes. **/
private long peakMemoryUsedBytes;
// These variables are reset after spilling:
@Nullable private ShuffleInMemorySorter inMemSorter;
@Nullable private MemoryBlock currentPage = null;
private long pageCursor = -1;
private long totalPageMemoryUsageBytes = 0;
// Checksum calculator for each partition. Empty when shuffle checksum disabled.
private final Checksum[] partitionChecksums;
ShuffleExternalSorter(
TaskMemoryManager memoryManager,
BlockManager blockManager,
TaskContext taskContext,
int initialSize,
int numPartitions,
SparkConf conf,
ShuffleWriteMetricsReporter writeMetrics) throws SparkException {
super(memoryManager,
(int) Math.min(PackedRecordPointer.MAXIMUM_PAGE_SIZE_BYTES, memoryManager.pageSizeBytes()),
memoryManager.getTungstenMemoryMode());
this.taskMemoryManager = memoryManager;
this.blockManager = blockManager;
this.taskContext = taskContext;
this.numPartitions = numPartitions;
// Use getSizeAsKb (not bytes) to maintain backwards compatibility if no units are provided
this.fileBufferSizeBytes =
(int) (long) conf.get(package$.MODULE$.SHUFFLE_FILE_BUFFER_SIZE()) * 1024;
this.numElementsForSpillThreshold =
(int) conf.get(package$.MODULE$.SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD());
this.sizeInBytesForSpillThreshold =
(long) conf.get(package$.MODULE$.SHUFFLE_SPILL_MAX_SIZE_FORCE_SPILL_THRESHOLD());
this.writeMetrics = writeMetrics;
this.inMemSorter = new ShuffleInMemorySorter(
this, initialSize, (boolean) conf.get(package$.MODULE$.SHUFFLE_SORT_USE_RADIXSORT()));
this.peakMemoryUsedBytes = getMemoryUsage();
this.diskWriteBufferSize =
(int) (long) conf.get(package$.MODULE$.SHUFFLE_DISK_WRITE_BUFFER_SIZE());
this.partitionChecksums = createPartitionChecksums(numPartitions, conf);
}
public long[] getChecksums() {
return getChecksumValues(partitionChecksums);
}
/**
* Sorts the in-memory records and writes the sorted records to an on-disk file.
* This method does not free the sort data structures.
*
* @param isFinalFile if true, this indicates that we're writing the final output file and that
* the bytes written should be counted towards shuffle write metrics rather
* than shuffle spill metrics.
*/
private void writeSortedFile(boolean isFinalFile) {
// Only emit the log if this is an actual spilling.
if (!isFinalFile) {
logger.info(
"Task {} on Thread {} spilling sort data of {} to disk ({} {} so far)",
MDC.of(LogKeys.TASK_ATTEMPT_ID, taskContext.taskAttemptId()),
MDC.of(LogKeys.THREAD_ID, Thread.currentThread().getId()),
MDC.of(LogKeys.MEMORY_SIZE, Utils.bytesToString(getMemoryUsage())),
MDC.of(LogKeys.NUM_SPILLS, spills.size()),
MDC.of(LogKeys.SPILL_TIMES, spills.size() != 1 ? "times" : "time"));
}
// This call performs the actual sort.
final ShuffleInMemorySorter.ShuffleSorterIterator sortedRecords =
inMemSorter.getSortedIterator();
// If there are no sorted records, so we don't need to create an empty spill file.
if (!sortedRecords.hasNext()) {
return;
}
final ShuffleWriteMetricsReporter writeMetricsToUse;
if (isFinalFile) {
// We're writing the final non-spill file, so we _do_ want to count this as shuffle bytes.
writeMetricsToUse = writeMetrics;
} else {
// We're spilling, so bytes written should be counted towards spill rather than write.
// Create a dummy WriteMetrics object to absorb these metrics, since we don't want to count
// them towards shuffle bytes written.
// The actual shuffle bytes written will be counted when we merge the spill files.
writeMetricsToUse = new ShuffleWriteMetrics();
}
// Small writes to DiskBlockObjectWriter will be fairly inefficient. Since there doesn't seem to
// be an API to directly transfer bytes from managed memory to the disk writer, we buffer
// data through a byte array. This array does not need to be large enough to hold a single
// record;
final byte[] writeBuffer = new byte[diskWriteBufferSize];
// Because this output will be read during shuffle, its compression codec must be controlled by
// spark.shuffle.compress instead of spark.shuffle.spill.compress, so we need to use
// createTempShuffleBlock here; see SPARK-3426 for more details.
final Tuple2<TempShuffleBlockId, File> spilledFileInfo =
blockManager.diskBlockManager().createTempShuffleBlock();
final File file = spilledFileInfo._2();
final TempShuffleBlockId blockId = spilledFileInfo._1();
final SpillInfo spillInfo = new SpillInfo(numPartitions, file, blockId);
// Unfortunately, we need a serializer instance in order to construct a DiskBlockObjectWriter.
// Our write path doesn't actually use this serializer (since we end up calling the `write()`
// OutputStream methods), but DiskBlockObjectWriter still calls some methods on it. To work
// around this, we pass a dummy no-op serializer.
final SerializerInstance ser = DummySerializerInstance.INSTANCE;
int currentPartition = -1;
final FileSegment committedSegment;
try (DiskBlockObjectWriter writer =
blockManager.getDiskWriter(blockId, file, ser, fileBufferSizeBytes, writeMetricsToUse)) {
final int uaoSize = UnsafeAlignedOffset.getUaoSize();
while (sortedRecords.hasNext()) {
sortedRecords.loadNext();
final int partition = sortedRecords.packedRecordPointer.getPartitionId();
assert (partition >= currentPartition);
if (partition != currentPartition) {
// Switch to the new partition
if (currentPartition != -1) {
final FileSegment fileSegment = writer.commitAndGet();
spillInfo.partitionLengths[currentPartition] = fileSegment.length();
}
currentPartition = partition;
if (partitionChecksums.length > 0) {
writer.setChecksum(partitionChecksums[currentPartition]);
}
}
final long recordPointer = sortedRecords.packedRecordPointer.getRecordPointer();
final Object recordPage = taskMemoryManager.getPage(recordPointer);
final long recordOffsetInPage = taskMemoryManager.getOffsetInPage(recordPointer);
int dataRemaining = UnsafeAlignedOffset.getSize(recordPage, recordOffsetInPage);
long recordReadPosition = recordOffsetInPage + uaoSize; // skip over record length
while (dataRemaining > 0) {
final int toTransfer = Math.min(diskWriteBufferSize, dataRemaining);
Platform.copyMemory(
recordPage, recordReadPosition, writeBuffer, Platform.BYTE_ARRAY_OFFSET, toTransfer);
writer.write(writeBuffer, 0, toTransfer);
recordReadPosition += toTransfer;
dataRemaining -= toTransfer;
}
writer.recordWritten();
}
committedSegment = writer.commitAndGet();
}
// If `writeSortedFile()` was called from `closeAndGetSpills()` and no records were inserted,
// then the file might be empty. Note that it might be better to avoid calling
// writeSortedFile() in that case.
if (currentPartition != -1) {
spillInfo.partitionLengths[currentPartition] = committedSegment.length();
spills.add(spillInfo);
}
if (!isFinalFile) { // i.e. this is a spill file
// The current semantics of `shuffleRecordsWritten` seem to be that it's updated when records
// are written to disk, not when they enter the shuffle sorting code. DiskBlockObjectWriter
// relies on its `recordWritten()` method being called in order to trigger periodic updates to
// `shuffleBytesWritten`. If we were to remove the `recordWritten()` call and increment that
// counter at a higher-level, then the in-progress metrics for records written and bytes
// written would get out of sync.
//
// When writing the last file, we pass `writeMetrics` directly to the DiskBlockObjectWriter;
// in all other cases, we pass in a dummy write metrics to capture metrics, then copy those
// metrics to the true write metrics here. The reason for performing this copying is so that
// we can avoid reporting spilled bytes as shuffle write bytes.
//
// Note that we intentionally ignore the value of `writeMetricsToUse.shuffleWriteTime()`.
// Consistent with ExternalSorter, we do not count this IO towards shuffle write time.
// SPARK-3577 tracks the spill time separately.
// This is guaranteed to be a ShuffleWriteMetrics based on the if check in the beginning
// of this method.
writeMetrics.incRecordsWritten(
((ShuffleWriteMetrics)writeMetricsToUse).recordsWritten());
taskContext.taskMetrics().incDiskBytesSpilled(
((ShuffleWriteMetrics)writeMetricsToUse).bytesWritten());
}
}
/**
* Sort and spill the current records in response to memory pressure.
*/
@Override
public long spill(long size, MemoryConsumer trigger) throws IOException {
if (trigger != this || inMemSorter == null || inMemSorter.numRecords() == 0) {
return 0L;
}
writeSortedFile(false);
final long spillSize = freeMemory();
inMemSorter.reset();
// Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
// records. Otherwise, if the task is over allocated memory, then without freeing the memory
// pages, we might not be able to get memory for the pointer array.
taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
return spillSize;
}
private long getMemoryUsage() {
return ((inMemSorter == null) ? 0 : inMemSorter.getMemoryUsage()) + totalPageMemoryUsageBytes;
}
private void updatePeakMemoryUsed() {
long mem = getMemoryUsage();
if (mem > peakMemoryUsedBytes) {
peakMemoryUsedBytes = mem;
}
}
/**
* Return the peak memory used so far, in bytes.
*/
long getPeakMemoryUsedBytes() {
updatePeakMemoryUsed();
return peakMemoryUsedBytes;
}
private long freeMemory() {
updatePeakMemoryUsed();
long memoryFreed = 0;
for (MemoryBlock block : allocatedPages) {
memoryFreed += block.size();
freePage(block);
totalPageMemoryUsageBytes -= block.size();
}
allocatedPages.clear();
currentPage = null;
pageCursor = 0;
return memoryFreed;
}
/**
* Force all memory and spill files to be deleted; called by shuffle error-handling code.
*/
public void cleanupResources() {
freeMemory();
if (inMemSorter != null) {
inMemSorter.free();
inMemSorter = null;
}
for (SpillInfo spill : spills) {
if (spill.file.exists() && !spill.file.delete()) {
logger.error("Unable to delete spill file {}",
MDC.of(LogKeys.PATH, spill.file.getPath()));
}
}
}
/**
* Checks whether there is enough space to insert an additional record in to the sort pointer
* array and grows the array if additional space is required. If the required space cannot be
* obtained, then the in-memory data will be spilled to disk.
*/
private void growPointerArrayIfNecessary() throws IOException {
assert(inMemSorter != null);
if (!inMemSorter.hasSpaceForAnotherRecord()) {
long used = inMemSorter.getMemoryUsage();
LongArray array;
try {
// could trigger spilling
array = allocateArray(used / 8 * 2);
} catch (TooLargePageException e) {
// The pointer array is too big to fix in a single page, spill.
spill();
return;
} catch (SparkOutOfMemoryError e) {
// should have trigger spilling
if (!inMemSorter.hasSpaceForAnotherRecord()) {
logger.error("Unable to grow the pointer array");
throw e;
}
return;
}
// check if spilling is triggered or not
if (inMemSorter.hasSpaceForAnotherRecord()) {
freeArray(array);
} else {
inMemSorter.expandPointerArray(array);
}
}
}
/**
* Allocates more memory in order to insert an additional record. This will request additional
* memory from the memory manager and spill if the requested memory can not be obtained.
*
* @param required the required space in the data page, in bytes, including space for storing
* the record size. This must be less than or equal to the page size (records
* that exceed the page size are handled via a different code path which uses
* special overflow pages).
*/
private void acquireNewPageIfNecessary(int required) {
if (currentPage == null ||
pageCursor + required > currentPage.getBaseOffset() + currentPage.size() ) {
// TODO: try to find space in previous pages
currentPage = allocatePage(required);
pageCursor = currentPage.getBaseOffset();
allocatedPages.add(currentPage);
totalPageMemoryUsageBytes += currentPage.size();
}
}
/**
* Write a record to the shuffle sorter.
*/
public void insertRecord(Object recordBase, long recordOffset, int length, int partitionId)
throws IOException {
assert(inMemSorter != null);
if (inMemSorter.numRecords() >= numElementsForSpillThreshold) {
logger.info("Spilling data because number of spilledRecords ({}) crossed the threshold {}",
MDC.of(LogKeys.NUM_ELEMENTS_SPILL_RECORDS, inMemSorter.numRecords()),
MDC.of(LogKeys.NUM_ELEMENTS_SPILL_THRESHOLD, numElementsForSpillThreshold));
spill();
}
// TODO: Ideally we only need to check the spill threshold when new memory needs to be
// allocated (both this sorter and the underlying ShuffleInMemorySorter may allocate
// new memory), but it's simpler to check the total memory usage of these two sorters
// before inserting each record.
final long usedMemory = getMemoryUsage();
if (usedMemory >= sizeInBytesForSpillThreshold) {
logger.info("Spilling data because memory usage ({}) crossed the threshold {}",
MDC.of(LogKeys.SPILL_RECORDS_SIZE, usedMemory),
MDC.of(LogKeys.SPILL_RECORDS_SIZE_THRESHOLD, sizeInBytesForSpillThreshold));
spill();
}
growPointerArrayIfNecessary();
final int uaoSize = UnsafeAlignedOffset.getUaoSize();
// Need 4 or 8 bytes to store the record length.
final int required = length + uaoSize;
acquireNewPageIfNecessary(required);
assert(currentPage != null);
final Object base = currentPage.getBaseObject();
final long recordAddress = taskMemoryManager.encodePageNumberAndOffset(currentPage, pageCursor);
UnsafeAlignedOffset.putSize(base, pageCursor, length);
pageCursor += uaoSize;
Platform.copyMemory(recordBase, recordOffset, base, pageCursor, length);
pageCursor += length;
inMemSorter.insertRecord(recordAddress, partitionId);
}
/**
* Close the sorter, causing any buffered data to be sorted and written out to disk.
*
* @return metadata for the spill files written by this sorter. If no records were ever inserted
* into this sorter, then this will return an empty array.
*/
public SpillInfo[] closeAndGetSpills() throws IOException {
if (inMemSorter != null) {
// Here we are spilling the remaining data in the buffer. If there is no spill before, this
// final spill file will be the final shuffle output file.
writeSortedFile(/* isFinalFile = */spills.isEmpty());
freeMemory();
inMemSorter.free();
inMemSorter = null;
}
return spills.toArray(new SpillInfo[spills.size()]);
}
}
|
ShuffleExternalSorter
|
java
|
playframework__playframework
|
core/play/src/main/java/play/libs/reflect/ConstructorUtils.java
|
{
"start": 4980,
"end": 5456
}
|
class ____ generally accessible, i.e. is declared in an entirely
* {@code public} manner.
*
* @param type to check
* @return {@code true} if {@code type} and any enclosing classes are {@code public}.
*/
private static boolean isAccessible(final Class<?> type) {
Class<?> cls = type;
while (cls != null) {
if (!Modifier.isPublic(cls.getModifiers())) {
return false;
}
cls = cls.getEnclosingClass();
}
return true;
}
}
|
is
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/lookup/keyordered/RecordsBuffer.java
|
{
"start": 1329,
"end": 6037
}
|
class ____<ELEMENT, KEY> {
/**
* The stream elements in this buffer have finished async operation but have not been output.
*/
private final Map<KEY, Deque<ELEMENT>> finishedBuffer;
/** The stream element in this buffer is being executed. */
private final Map<KEY, ELEMENT> activeBuffer;
/**
* The element in that should wait until all preceding records on identical key finishing its
* execution. After which the queueing element will move element into the active buffer.
*/
private final Map<KEY, Deque<ELEMENT>> blockingBuffer;
// ===== metrics =====
private int blockingSize;
private int finishSize;
public RecordsBuffer() {
this.activeBuffer = new ConcurrentHashMap<>();
this.finishedBuffer = new ConcurrentHashMap<>();
this.blockingBuffer = new ConcurrentHashMap<>();
this.blockingSize = 0;
this.finishSize = 0;
}
public void enqueueRecord(KEY key, ELEMENT record) {
blockingBuffer.computeIfAbsent(key, k -> new LinkedList<>()).add(record);
blockingSize++;
}
public Optional<ELEMENT> pop(KEY key) {
if (!blockingBuffer.containsKey(key)) {
return Optional.empty();
}
ELEMENT element = blockingBuffer.get(key).poll();
if (element == null) {
return Optional.empty();
}
blockingSize--;
if (blockingBuffer.get(key).isEmpty()) {
blockingBuffer.remove(key);
}
activeBuffer.put(key, element);
return Optional.of(element);
}
public void finish(KEY key, ELEMENT element) {
finishedBuffer.computeIfAbsent(key, k -> new LinkedList<>()).add(element);
finishSize++;
Preconditions.checkState(activeBuffer.containsKey(key));
activeBuffer.remove(key);
}
public void output(KEY key, ELEMENT element) {
Preconditions.checkState(finishedBuffer.containsKey(key));
finishedBuffer.get(key).remove(element);
finishSize--;
if (finishedBuffer.get(key).isEmpty()) {
finishedBuffer.remove(key);
}
}
/** Collect all elements which are not emitted for snapshot. */
public Map<KEY, Deque<ELEMENT>> pendingElements() {
Map<KEY, Deque<ELEMENT>> mergedMap = new HashMap<>();
finishedBuffer.forEach(
(key, value) ->
mergedMap.merge(
key,
value,
(existingValue, newValue) -> {
existingValue.addAll(newValue);
return existingValue;
}));
activeBuffer.forEach(
(key, value) ->
mergedMap.computeIfAbsent(key, k -> new ArrayDeque<>()).push(value));
blockingBuffer.forEach(
(key, value) ->
mergedMap.merge(
key,
value,
(existingValue, newValue) -> {
existingValue.addAll(newValue);
return existingValue;
}));
return mergedMap;
}
public String sizeToString() {
int finishSize = 0;
for (Deque<ELEMENT> deque : finishedBuffer.values()) {
finishSize += deque.size();
}
int activeSize = activeBuffer.size();
int blockingSize = blockingBuffer.size();
for (Deque<ELEMENT> deque : blockingBuffer.values()) {
blockingSize += deque.size();
}
return "finished buffer size = "
+ finishSize
+ " active buffer size = "
+ activeSize
+ " blocking buffer size = "
+ blockingSize;
}
public void close() {
finishedBuffer.clear();
activeBuffer.clear();
blockingBuffer.clear();
}
// ===== metrics =====
public int getBlockingSize() {
return blockingSize;
}
public int getActiveSize() {
return activeBuffer.size();
}
public int getFinishSize() {
return finishSize;
}
// ===== visible for test =====
@VisibleForTesting
public Map<KEY, Deque<ELEMENT>> getFinishedBuffer() {
return finishedBuffer;
}
@VisibleForTesting
public Map<KEY, ELEMENT> getActiveBuffer() {
return activeBuffer;
}
@VisibleForTesting
public Map<KEY, Deque<ELEMENT>> getBlockingBuffer() {
return blockingBuffer;
}
}
|
RecordsBuffer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelType.java
|
{
"start": 983,
"end": 2494
}
|
enum ____ {
TREE_ENSEMBLE(null),
LANG_IDENT(null),
PYTORCH(new TrainedModelInput(Collections.singletonList("input")));
public static TrainedModelType fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
/**
* Introspect the given model and return the model type
* representing it.
* @param model A Trained model
* @return The model type or null if unknown
*/
public static TrainedModelType typeFromTrainedModel(TrainedModel model) {
if (model instanceof Ensemble || model instanceof Tree) {
return TrainedModelType.TREE_ENSEMBLE;
} else if (model instanceof LangIdentNeuralNetwork) {
return TrainedModelType.LANG_IDENT;
} else {
return null;
}
}
private final TrainedModelInput defaultInput;
TrainedModelType(@Nullable TrainedModelInput defaultInput) {
this.defaultInput = defaultInput;
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
@Nullable
public TrainedModelInput getDefaultInput() {
return defaultInput;
}
public TrainedModelLocation getDefaultLocation(String modelId) {
return switch (this) {
case TREE_ENSEMBLE, LANG_IDENT -> new IndexLocation(InferenceIndexConstants.LATEST_INDEX_NAME);
case PYTORCH -> new IndexLocation(InferenceIndexConstants.nativeDefinitionStore());
};
}
}
|
TrainedModelType
|
java
|
apache__flink
|
flink-test-utils-parent/flink-connector-test-utils/src/main/java/org/apache/flink/connector/testframe/external/ExternalContextFactory.java
|
{
"start": 987,
"end": 1231
}
|
interface ____<C extends ExternalContext> {
/**
* Create an instance of {@link ExternalContext}.
*
* @param testName name of the current running test
*/
C createExternalContext(String testName);
}
|
ExternalContextFactory
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/authentication/www/BasicAuthenticationEntryPoint.java
|
{
"start": 1748,
"end": 2488
}
|
class ____ implements AuthenticationEntryPoint, InitializingBean {
private @Nullable String realmName;
@Override
public void afterPropertiesSet() {
Assert.hasText(this.realmName, "realmName must be specified");
}
@Override
public void commence(HttpServletRequest request, HttpServletResponse response,
AuthenticationException authException) throws IOException {
response.setHeader("WWW-Authenticate", "Basic realm=\"" + this.realmName + "\"");
response.sendError(HttpStatus.UNAUTHORIZED.value(), HttpStatus.UNAUTHORIZED.getReasonPhrase());
}
public @Nullable String getRealmName() {
return this.realmName;
}
public void setRealmName(String realmName) {
this.realmName = realmName;
}
}
|
BasicAuthenticationEntryPoint
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/metadata/ConfigurableMetadataServiceExporter.java
|
{
"start": 2401,
"end": 8388
}
|
class ____ {
private final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());
@Deprecated
private final MetadataServiceDelegation metadataService;
private final MetadataServiceDelegationV2 metadataServiceV2;
@Deprecated
private volatile ServiceConfig<MetadataService> serviceConfig;
private volatile ServiceConfig<MetadataServiceV2> serviceConfigV2;
private final ApplicationModel applicationModel;
public ConfigurableMetadataServiceExporter(
ApplicationModel applicationModel,
MetadataServiceDelegation metadataService,
MetadataServiceDelegationV2 metadataServiceV2) {
this.applicationModel = applicationModel;
this.metadataService = metadataService;
this.metadataServiceV2 = metadataServiceV2;
}
public synchronized ConfigurableMetadataServiceExporter export() {
if (!isExported()) {
if (MetadataServiceVersionUtils.needExportV1(applicationModel)) {
exportV1();
}
if (MetadataServiceVersionUtils.needExportV2(applicationModel)) {
exportV2();
}
} else {
if (logger.isWarnEnabled()) {
logger.warn(
CONFIG_METADATA_SERVICE_EXPORTED,
"",
"",
"The MetadataService has been exported : " + getExportedUrls());
}
}
return this;
}
/**
* Get exported urls which include v1 and v2 if existed
* @return exported urls
*/
public List<URL> getExportedUrls() {
List<URL> urls = new ArrayList<>();
if (serviceConfig != null) {
urls.addAll(serviceConfig.getExportedUrls());
}
if (serviceConfigV2 != null) {
urls.addAll(serviceConfigV2.getExportedUrls());
}
return urls;
}
private static final String INTERNAL_METADATA_REGISTRY_ID = "internal-metadata-registry";
private void exportV1() {
ExecutorService internalServiceExecutor = applicationModel
.getFrameworkModel()
.getBeanFactory()
.getBean(FrameworkExecutorRepository.class)
.getInternalServiceExecutor();
this.serviceConfig = InternalServiceConfigBuilder.<MetadataService>newBuilder(applicationModel)
.interfaceClass(MetadataService.class)
.protocol(getApplicationConfig().getMetadataServiceProtocol(), METADATA_SERVICE_PROTOCOL_KEY)
.port(getApplicationConfig().getMetadataServicePort(), METADATA_SERVICE_PORT_KEY)
.registryId(INTERNAL_METADATA_REGISTRY_ID)
.executor(internalServiceExecutor)
.ref(metadataService)
.version(V1)
.build(configConsumer -> configConsumer.setMethods(generateMethodConfig()));
serviceConfig.export();
metadataService.setMetadataURL(serviceConfig.getExportedUrls().get(0));
if (logger.isInfoEnabled()) {
logger.info("[SERVICE_PUBLISH] [METADATA_REGISTER] The MetadataService exports urls : "
+ serviceConfig.getExportedUrls());
}
}
private void exportV2() {
ExecutorService internalServiceExecutor = applicationModel
.getFrameworkModel()
.getBeanFactory()
.getBean(FrameworkExecutorRepository.class)
.getInternalServiceExecutor();
this.serviceConfigV2 = InternalServiceConfigBuilder.<MetadataServiceV2>newBuilder(applicationModel)
.interfaceClass(MetadataServiceV2.class)
.protocol(TRIPLE, METADATA_SERVICE_PROTOCOL_KEY)
.port(getApplicationConfig().getMetadataServicePort(), METADATA_SERVICE_PORT_KEY)
.registryId(INTERNAL_METADATA_REGISTRY_ID)
.executor(internalServiceExecutor)
.ref(metadataServiceV2)
.version(V2)
.build();
serviceConfigV2.export();
metadataServiceV2.setMetadataUrl(serviceConfigV2.getExportedUrls().get(0));
if (logger.isInfoEnabled()) {
logger.info("[SERVICE_PUBLISH][METADATA_REGISTER] The MetadataServiceV2 exports urls : "
+ serviceConfigV2.getExportedUrls());
}
}
public ConfigurableMetadataServiceExporter unexport() {
if (isExported()) {
serviceConfig.unexport();
serviceConfigV2.unexport();
metadataService.setMetadataURL(null);
}
return this;
}
private boolean v1Exported() {
return serviceConfig != null && serviceConfig.isExported() && !serviceConfig.isUnexported();
}
private boolean v2Exported() {
return serviceConfigV2 != null && serviceConfigV2.isExported() && !serviceConfigV2.isUnexported();
}
public boolean isExported() {
return v1Exported() || v2Exported();
}
/**
 * Shortcut to the application's {@code ApplicationConfig}.
 * <p>
 * Calls {@code Optional.get()} without a presence check — assumes the
 * application config has already been registered with the config manager
 * by the time this exporter runs.
 */
private ApplicationConfig getApplicationConfig() {
    return applicationModel.getApplicationConfigManager().getApplication().get();
}
/**
 * Builds the method-level configuration for the service-discovery metadata
 * service.
 * <p>
 * Marks the second parameter (index 1) of
 * {@code getAndListenInstanceMetadata} as an argument callback so that
 * {@link org.apache.dubbo.registry.client.ServiceInstance} metadata change
 * events can be pushed back to subscribers.
 *
 * @since 3.0
 */
private List<MethodConfig> generateMethodConfig() {
    ArgumentConfig callbackArgument = new ArgumentConfig();
    callbackArgument.setIndex(1);
    callbackArgument.setCallback(true);
    MethodConfig method = new MethodConfig();
    method.setName("getAndListenInstanceMetadata");
    method.setArguments(Collections.singletonList(callbackArgument));
    return Collections.singletonList(method);
}
}
|
ConfigurableMetadataServiceExporter
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/util/RemoteIterators.java
|
{
"start": 2697,
"end": 3384
}
|
class ____<T> implements RemoteIterator<T> {

    /** The one value to hand out; {@code null} means an empty iterator. */
    private final T value;

    /** Set once the value has been handed out (or was never present). */
    private boolean consumed;

    private SingletonIterator(@Nullable T singleton) {
        this.value = singleton;
        this.consumed = singleton == null;
    }

    @Override
    public boolean hasNext() {
        return !consumed;
    }

    @Override
    public T next() {
        // Guard clause: a null singleton or a second call both land here.
        if (consumed) {
            throw new NoSuchElementException();
        }
        consumed = true;
        return value;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("singleton", value)
                .toString();
    }
}
}
|
SingletonIterator
|
java
|
apache__kafka
|
trogdor/src/main/java/org/apache/kafka/trogdor/rest/Message.java
|
{
"start": 933,
"end": 1370
}
|
class ____ {

    /**
     * Two messages are equal iff they are the same concrete class and their
     * JSON renderings match exactly; equals/hashCode/toString are all final
     * so subclasses cannot break that invariant.
     */
    @Override
    public final boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || !getClass().equals(o.getClass())) {
            return false;
        }
        return Objects.equals(toString(), o.toString());
    }

    @Override
    public final int hashCode() {
        // Consistent with equals: both delegate to the JSON form.
        return toString().hashCode();
    }

    @Override
    public final String toString() {
        return JsonUtil.toJsonString(this);
    }
}
|
Message
|
java
|
spring-projects__spring-framework
|
spring-tx/src/test/java/org/springframework/transaction/event/CapturingSynchronizationCallback.java
|
{
"start": 833,
"end": 1282
}
|
class ____ implements TransactionalApplicationListener.SynchronizationCallback {

    /** Event observed before listener invocation, if any. */
    @Nullable ApplicationEvent preEvent;

    /** Event observed after listener invocation, if any. */
    @Nullable ApplicationEvent postEvent;

    /** Exception propagated from the listener, if any. */
    @Nullable Throwable ex;

    @Override
    public void preProcessEvent(ApplicationEvent event) {
        preEvent = event;
    }

    @Override
    public void postProcessEvent(ApplicationEvent event, @Nullable Throwable ex) {
        this.ex = ex;
        this.postEvent = event;
    }
}
|
CapturingSynchronizationCallback
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionWriter.java
|
{
"start": 10114,
"end": 10777
}
|
class ____ implements ClassOutputWriter, BeanDefinitionVisitor, BeanElement, Toggleable {
@NextMajorVersion("Inline as true")
public static final String OMIT_CONFPROP_INJECTION_POINTS = "micronaut.processing.omit.confprop.injectpoints";
public static final String CLASS_SUFFIX = "$Definition";
private static final Constructor<AbstractBeanDefinitionBeanConstructor> CONSTRUCTOR_ABSTRACT_CONSTRUCTOR_IP = ReflectionUtils.findConstructor(
AbstractBeanDefinitionBeanConstructor.class,
BeanDefinition.class)
.orElseThrow(() -> new ClassGenerationException("Invalid version of Micronaut present on the
|
BeanDefinitionWriter
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/HashMapHeadersMapFactory.java
|
{
"start": 1286,
"end": 1769
}
|
class ____ implements HeadersMapFactory {

    /** Creates a fresh, empty headers map. */
    @Override
    public Map<String, Object> newMap() {
        return new HashMap<>();
    }

    /** Creates a headers map pre-populated with the given entries. */
    @Override
    public Map<String, Object> newMap(Map<String, Object> map) {
        Map<String, Object> copy = new HashMap<>(map);
        return copy;
    }

    /** Plain {@link HashMap} (or a subclass) is this factory's native representation. */
    @Override
    public boolean isInstanceOf(Map<String, Object> map) {
        return HashMap.class.isInstance(map);
    }

    /** HashMap keys are compared case-sensitively. */
    @Override
    public boolean isCaseInsensitive() {
        return false;
    }
}
|
HashMapHeadersMapFactory
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/testing/springbootapplications/autoconfiguredspringrestdocs/withmockmvc/MyRestDocsConfiguration.java
|
{
"start": 1098,
"end": 1342
}
|
class ____ implements RestDocsMockMvcConfigurationCustomizer {

    @Override
    public void customize(MockMvcRestDocumentationConfigurer configurer) {
        // Render generated snippets as Markdown instead of the Asciidoctor default.
        configurer
                .snippets()
                .withTemplateFormat(TemplateFormats.markdown());
    }
}
|
MyRestDocsConfiguration
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableFilterTest.java
|
{
"start": 2576,
"end": 27321
}
|
class ____ {
    // Shared serdes for all topologies in this test: String keys, Integer values.
    private final Consumed<String, Integer> consumed = Consumed.with(Serdes.String(), Serdes.Integer());
    private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.Integer());

    @BeforeEach
    public void setUp() {
        // disable caching at the config level
        props.setProperty(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, "0");
    }

    // Keeps even values; filterNot with the same predicate therefore keeps odd values.
    private final Predicate<String, Integer> predicate = (key, value) -> (value % 2) == 0;

    // Pipes a fixed input set through the topology and verifies that the filter
    // (table2) and filterNot (table3) results emit tombstones (null values) for
    // dropped records while forwarding matches unchanged, timestamps preserved.
    private void doTestKTable(final StreamsBuilder builder,
                              final KTable<String, Integer> table2,
                              final KTable<String, Integer> table3,
                              final String topic) {
        final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
        table2.toStream().process(supplier);
        table3.toStream().process(supplier);
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, Integer> inputTopic =
                driver.createInputTopic(topic, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            inputTopic.pipeInput("A", 1, 10L);
            inputTopic.pipeInput("B", 2, 5L);
            inputTopic.pipeInput("C", 3, 8L);
            inputTopic.pipeInput("D", 4, 14L);
            inputTopic.pipeInput("A", null, 18L);
            inputTopic.pipeInput("B", null, 15L);
        }
        final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
        // processors.get(0) is the filter result, processors.get(1) the filterNot result.
        processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", null, 10),
            new KeyValueTimestamp<>("B", 2, 5),
            new KeyValueTimestamp<>("C", null, 8),
            new KeyValueTimestamp<>("D", 4, 14),
            new KeyValueTimestamp<>("A", null, 18),
            new KeyValueTimestamp<>("B", null, 15));
        processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", 1, 10),
            new KeyValueTimestamp<>("B", null, 5),
            new KeyValueTimestamp<>("C", 3, 8),
            new KeyValueTimestamp<>("D", null, 14),
            new KeyValueTimestamp<>("A", null, 18),
            new KeyValueTimestamp<>("B", null, 15));
    }

    // Without Materialized, neither filter result should have a queryable store.
    @Test
    public void shouldPassThroughWithoutMaterialization() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTable<String, Integer> table1 = builder.table(topic1, consumed);
        final KTable<String, Integer> table2 = table1.filter(predicate);
        final KTable<String, Integer> table3 = table1.filterNot(predicate);
        assertNull(table1.queryableStoreName());
        assertNull(table2.queryableStoreName());
        assertNull(table3.queryableStoreName());
        doTestKTable(builder, table2, table3, topic1);
    }

    // Only the explicitly materialized filter result exposes a store name.
    @Test
    public void shouldPassThroughOnMaterialization() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTable<String, Integer> table1 = builder.table(topic1, consumed);
        final KTable<String, Integer> table2 = table1.filter(predicate, Materialized.as("store2"));
        final KTable<String, Integer> table3 = table1.filterNot(predicate);
        assertNull(table1.queryableStoreName());
        assertEquals("store2", table2.queryableStoreName());
        assertNull(table3.queryableStoreName());
        doTestKTable(builder, table2, table3, topic1);
    }

    // Exercises the value getters of the filter/filterNot results directly:
    // a key filtered out must read as null, a key kept must carry its
    // value-and-timestamp, and updates/tombstones must flip membership.
    private void doTestValueGetter(final StreamsBuilder builder,
                                   final KTableImpl<String, Integer, Integer> table2,
                                   final KTableImpl<String, Integer, Integer> table3,
                                   final String topic1) {
        final Topology topology = builder.build();
        final KTableValueGetterSupplier<String, Integer> getterSupplier2 = table2.valueGetterSupplier();
        final KTableValueGetterSupplier<String, Integer> getterSupplier3 = table3.valueGetterSupplier();
        final InternalTopologyBuilder topologyBuilder = TopologyWrapper.getInternalTopologyBuilder(topology);
        topologyBuilder.connectProcessorAndStateStores(table2.name, getterSupplier2.storeNames());
        topologyBuilder.connectProcessorAndStateStores(table3.name, getterSupplier3.storeNames());
        try (final TopologyTestDriverWrapper driver = new TopologyTestDriverWrapper(topology, props)) {
            final TestInputTopic<String, Integer> inputTopic =
                driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final KTableValueGetter<String, Integer> getter2 = getterSupplier2.get();
            final KTableValueGetter<String, Integer> getter3 = getterSupplier3.get();
            getter2.init(driver.setCurrentNodeForProcessorContext(table2.name));
            getter3.init(driver.setCurrentNodeForProcessorContext(table3.name));
            inputTopic.pipeInput("A", 1, 5L);
            inputTopic.pipeInput("B", 1, 10L);
            inputTopic.pipeInput("C", 1, 15L);
            // All values odd: filter drops everything, filterNot keeps everything.
            assertNull(getter2.get("A"));
            assertNull(getter2.get("B"));
            assertNull(getter2.get("C"));
            assertEquals(ValueAndTimestamp.make(1, 5L), getter3.get("A"));
            assertEquals(ValueAndTimestamp.make(1, 10L), getter3.get("B"));
            assertEquals(ValueAndTimestamp.make(1, 15L), getter3.get("C"));
            inputTopic.pipeInput("A", 2, 10L);
            inputTopic.pipeInput("B", 2, 5L);
            // A and B become even: membership flips between the two getters.
            assertEquals(ValueAndTimestamp.make(2, 10L), getter2.get("A"));
            assertEquals(ValueAndTimestamp.make(2, 5L), getter2.get("B"));
            assertNull(getter2.get("C"));
            assertNull(getter3.get("A"));
            assertNull(getter3.get("B"));
            assertEquals(ValueAndTimestamp.make(1, 15L), getter3.get("C"));
            inputTopic.pipeInput("A", 3, 15L);
            // A flips back to odd.
            assertNull(getter2.get("A"));
            assertEquals(ValueAndTimestamp.make(2, 5L), getter2.get("B"));
            assertNull(getter2.get("C"));
            assertEquals(ValueAndTimestamp.make(3, 15L), getter3.get("A"));
            assertNull(getter3.get("B"));
            assertEquals(ValueAndTimestamp.make(1, 15L), getter3.get("C"));
            inputTopic.pipeInput("A", null, 10L);
            inputTopic.pipeInput("B", null, 20L);
            // Tombstones remove A and B from both views.
            assertNull(getter2.get("A"));
            assertNull(getter2.get("B"));
            assertNull(getter2.get("C"));
            assertNull(getter3.get("A"));
            assertNull(getter3.get("B"));
            assertEquals(ValueAndTimestamp.make(1, 15L), getter3.get("C"));
        }
    }

    @Test
    public void shouldGetValuesOnMaterialization() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
        final KTableImpl<String, Integer, Integer> table2 =
            (KTableImpl<String, Integer, Integer>) table1.filter(predicate, Materialized.as("store2"));
        final KTableImpl<String, Integer, Integer> table3 =
            (KTableImpl<String, Integer, Integer>) table1.filterNot(predicate, Materialized.as("store3"));
        final KTableImpl<String, Integer, Integer> table4 =
            (KTableImpl<String, Integer, Integer>) table1.filterNot(predicate);
        assertNull(table1.queryableStoreName());
        assertEquals("store2", table2.queryableStoreName());
        assertEquals("store3", table3.queryableStoreName());
        assertNull(table4.queryableStoreName());
        doTestValueGetter(builder, table2, table3, topic1);
    }

    // With sendOldValues disabled, every Change forwarded downstream must carry
    // a null old value.
    private void doTestNotSendingOldValue(final StreamsBuilder builder,
                                          final KTableImpl<String, Integer, Integer> table1,
                                          final KTableImpl<String, Integer, Integer> table2,
                                          final String topic1) {
        final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
        builder.build().addProcessor("proc1", supplier, table1.name);
        builder.build().addProcessor("proc2", supplier, table2.name);
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, Integer> inputTopic =
                driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            inputTopic.pipeInput("A", 1, 5L);
            inputTopic.pipeInput("B", 1, 10L);
            inputTopic.pipeInput("C", 1, 15L);
            final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
            processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
                new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
                new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
            processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 5),
                new KeyValueTimestamp<>("B", new Change<>(null, null), 10),
                new KeyValueTimestamp<>("C", new Change<>(null, null), 15));
            inputTopic.pipeInput("A", 2, 15L);
            inputTopic.pipeInput("B", 2, 8L);
            processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
                new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
            processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
                new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
            inputTopic.pipeInput("A", 3, 20L);
            processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(3, null), 20));
            processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 20));
            inputTopic.pipeInput("A", null, 10L);
            inputTopic.pipeInput("B", null, 20L);
            processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 10),
                new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
            processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 10),
                new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
        }
    }

    @Test
    public void shouldNotSendOldValuesWithoutMaterialization() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
        final KTableImpl<String, Integer, Integer> table2 = (KTableImpl<String, Integer, Integer>) table1.filter(predicate);
        doTestNotSendingOldValue(builder, table1, table2, topic1);
    }

    @Test
    public void shouldNotSendOldValuesOnMaterialization() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
        final KTableImpl<String, Integer, Integer> table2 =
            (KTableImpl<String, Integer, Integer>) table1.filter(predicate, Materialized.as("store2"));
        doTestNotSendingOldValue(builder, table1, table2, topic1);
    }

    // enableSendingOldValues(false) must not force materialization upstream.
    @Test
    public void shouldNotEnableSendingOldValuesIfNotAlreadyMaterializedAndNotForcedToMaterialize() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
        final KTableImpl<String, Integer, Integer> table2 = (KTableImpl<String, Integer, Integer>) table1.filter(predicate);
        table2.enableSendingOldValues(false);
        doTestNotSendingOldValue(builder, table1, table2, topic1);
    }

    // With sendOldValues enabled on the filter result, downstream Changes carry
    // the previous value; whether the parent table also forwards old values
    // depends on its own sendOldValues state (parentSendOldVals).
    private void doTestSendingOldValue(final StreamsBuilder builder,
                                       final KTableImpl<String, Integer, Integer> table1,
                                       final KTableImpl<String, Integer, Integer> table2,
                                       final String topic1) {
        final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
        final Topology topology = builder.build();
        topology.addProcessor("proc1", supplier, table1.name);
        topology.addProcessor("proc2", supplier, table2.name);
        final boolean parentSendOldVals = table1.sendingOldValueEnabled();
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
            final TestInputTopic<String, Integer> inputTopic =
                driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            inputTopic.pipeInput("A", 1, 5L);
            inputTopic.pipeInput("B", 1, 10L);
            inputTopic.pipeInput("C", 1, 15L);
            final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
            final MockApiProcessor<String, Integer, Void, Void> table1Output = processors.get(0);
            final MockApiProcessor<String, Integer, Void, Void> table2Output = processors.get(1);
            table1Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
                new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
                new KeyValueTimestamp<>("C", new Change<>(1, null), 15)
            );
            // All-odd input: filter drops everything, so the filter output is empty
            // (duplicate tombstones are suppressed when old values are available).
            table2Output.checkEmptyAndClearProcessResult();
            inputTopic.pipeInput("A", 2, 15L);
            inputTopic.pipeInput("B", 2, 8L);
            table1Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("A", new Change<>(2, parentSendOldVals ? 1 : null), 15),
                new KeyValueTimestamp<>("B", new Change<>(2, parentSendOldVals ? 1 : null), 8)
            );
            table2Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
                new KeyValueTimestamp<>("B", new Change<>(2, null), 8)
            );
            inputTopic.pipeInput("A", 3, 20L);
            table1Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("A", new Change<>(3, parentSendOldVals ? 2 : null), 20)
            );
            // A leaves the filtered set: tombstone with the old value attached.
            table2Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("A", new Change<>(null, 2), 20)
            );
            inputTopic.pipeInput("A", null, 10L);
            inputTopic.pipeInput("B", null, 20L);
            table1Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("A", new Change<>(null, parentSendOldVals ? 3 : null), 10),
                new KeyValueTimestamp<>("B", new Change<>(null, parentSendOldVals ? 2 : null), 20)
            );
            // A was already absent from the filtered view, so only B's removal is emitted.
            table2Output.checkAndClearProcessResult(
                new KeyValueTimestamp<>("B", new Change<>(null, 2), 20)
            );
        }
    }

    @Test
    public void shouldEnableSendOldValuesWhenNotMaterializedAlreadyButForcedToMaterialize() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
        final KTableImpl<String, Integer, Integer> table2 =
            (KTableImpl<String, Integer, Integer>) table1.filter(predicate);
        table2.enableSendingOldValues(true);
        // Forcing materialization propagates sendOldValues up to the parent.
        assertThat(table1.sendingOldValueEnabled(), is(true));
        assertThat(table2.sendingOldValueEnabled(), is(true));
        doTestSendingOldValue(builder, table1, table2, topic1);
    }

    @Test
    public void shouldEnableSendOldValuesWhenMaterializedAlreadyAndForcedToMaterialize() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
        final KTableImpl<String, Integer, Integer> table2 =
            (KTableImpl<String, Integer, Integer>) table1.filter(predicate, Materialized.as("store2"));
        table2.enableSendingOldValues(true);
        // The filter is materialized itself, so the parent stays untouched.
        assertThat(table1.sendingOldValueEnabled(), is(false));
        assertThat(table2.sendingOldValueEnabled(), is(true));
        doTestSendingOldValue(builder, table1, table2, topic1);
    }

    @Test
    public void shouldSendOldValuesWhenEnabledOnUpStreamMaterialization() {
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final KTableImpl<String, Integer, Integer> table1 =
            (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed, Materialized.as("store2"));
        final KTableImpl<String, Integer, Integer> table2 =
            (KTableImpl<String, Integer, Integer>) table1.filter(predicate);
        table2.enableSendingOldValues(false);
        // The materialized parent already sends old values; the filter inherits it.
        assertThat(table1.sendingOldValueEnabled(), is(true));
        assertThat(table2.sendingOldValueEnabled(), is(true));
        doTestSendingOldValue(builder, table1, table2, topic1);
    }

    // Pipes records that the filter rejects and checks whether the resulting
    // duplicate tombstones are suppressed (shouldSkip) or forwarded.
    private void doTestSkipNullOnMaterialization(final StreamsBuilder builder,
                                                 final KTableImpl<String, String, String> table1,
                                                 final KTableImpl<String, String, String> table2,
                                                 final String topic1,
                                                 final boolean shouldSkip) {
        final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
        final Topology topology = builder.build();
        topology.addProcessor("proc1", supplier, table1.name);
        topology.addProcessor("proc2", supplier, table2.name);
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
            final TestInputTopic<String, String> stringinputTopic =
                driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            stringinputTopic.pipeInput("A", "reject", 5L);
            stringinputTopic.pipeInput("B", "reject", 10L);
            stringinputTopic.pipeInput("C", "reject", 20L);
        }
        final List<MockApiProcessor<String, String, Void, Void>> processors = supplier.capturedProcessors(2);
        processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>("reject", null), 5),
            new KeyValueTimestamp<>("B", new Change<>("reject", null), 10),
            new KeyValueTimestamp<>("C", new Change<>("reject", null), 20));
        if (shouldSkip) {
            processors.get(1).checkEmptyAndClearProcessResult();
        } else {
            processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 5),
                new KeyValueTimestamp<>("B", new Change<>(null, null), 10),
                new KeyValueTimestamp<>("C", new Change<>(null, null), 20));
        }
    }

    @Test
    public void shouldSkipNullToRepartitionWithoutMaterialization() {
        // Do not explicitly set enableSendingOldValues. Let a further downstream stateful operator trigger it instead.
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
        final KTableImpl<String, String, String> table1 =
            (KTableImpl<String, String, String>) builder.table(topic1, consumed);
        final KTableImpl<String, String, String> table2 =
            (KTableImpl<String, String, String>) table1.filter((key, value) -> value.equalsIgnoreCase("accept"));
        table2.groupBy(MockMapper.noOpKeyValueMapper())
            .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER);
        doTestSkipNullOnMaterialization(builder, table1, table2, topic1, true);
    }

    @Test
    public void shouldSkipNullToRepartitionOnMaterialization() {
        // Do not explicitly set enableSendingOldValues. Let a further downstream stateful operator trigger it instead.
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
        final KTableImpl<String, String, String> table1 =
            (KTableImpl<String, String, String>) builder.table(topic1, consumed);
        final KTableImpl<String, String, String> table2 =
            (KTableImpl<String, String, String>) table1.filter((key, value) -> value.equalsIgnoreCase("accept"), Materialized.as("store2"));
        table2.groupBy(MockMapper.noOpKeyValueMapper())
            .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER, Materialized.as("mock-result"));
        doTestSkipNullOnMaterialization(builder, table1, table2, topic1, true);
    }

    @Test
    public void shouldNotSkipNullIfVersionedUpstream() {
        // stateful downstream operation enables sendOldValues, but duplicate nulls will still
        // be sent because the source table is versioned
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final Materialized<String, String, KeyValueStore<Bytes, byte[]>> versionedMaterialize =
            Materialized.as(Stores.persistentVersionedKeyValueStore("versioned", Duration.ofMinutes(5)));
        final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
        final KTableImpl<String, String, String> table1 =
            (KTableImpl<String, String, String>) builder.table(topic1, consumed, versionedMaterialize);
        final KTableImpl<String, String, String> table2 =
            (KTableImpl<String, String, String>) table1.filter((key, value) -> value.equalsIgnoreCase("accept"));
        table2.groupBy(MockMapper.noOpKeyValueMapper())
            .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER);
        doTestSkipNullOnMaterialization(builder, table1, table2, topic1, false);
    }

    @Test
    public void shouldSkipNullIfVersionedDownstream() {
        // materializing the result of the filter as a versioned store does not prevent duplicate
        // tombstones from being sent, as it's whether the input table is versioned or not that
        // determines whether the optimization is enabled
        final StreamsBuilder builder = new StreamsBuilder();
        final String topic1 = "topic1";
        final Materialized<String, String, KeyValueStore<Bytes, byte[]>> versionedMaterialize =
            Materialized.as(Stores.persistentVersionedKeyValueStore("versioned", Duration.ofMinutes(5)));
        final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
        final KTableImpl<String, String, String> table1 =
            (KTableImpl<String, String, String>) builder.table(topic1, consumed, Materialized.as("store"));
        final KTableImpl<String, String, String> table2 =
            (KTableImpl<String, String, String>) table1.filter((key, value) -> value.equalsIgnoreCase("accept"), versionedMaterialize);
        table2.groupBy(MockMapper.noOpKeyValueMapper())
            .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER);
        doTestSkipNullOnMaterialization(builder, table1, table2, topic1, true);
    }

    // Compile-time check only: filter/filterNot accept predicates over
    // supertypes of the key/value types.
    @Test
    public void testTypeVariance() {
        final Predicate<Number, Object> numberKeyPredicate = (key, value) -> false;
        new StreamsBuilder()
            .<Integer, String>table("empty")
            .filter(numberKeyPredicate)
            .filterNot(numberKeyPredicate)
            .toStream()
            .to("nirvana");
    }
}
|
KTableFilterTest
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-maven-plugin/src/test/java/org/apache/camel/maven/CamelSalesforceMojoOutputTest.java
|
{
"start": 2514,
"end": 9425
}
|
class ____ {
    private static final String TEST_CALCULATED_FORMULA_FILE = "complex_calculated_formula.json";
    private static final String TEST_CASE_FILE = "case.json";
    // NOTE(review): logger is keyed by SchemaExecution's class name, not this
    // class — looks intentional for shared log routing, but confirm.
    private static final Logger LOG = LoggerFactory.getLogger(SchemaExecution.class.getName());

    // SObject description parsed from the JSON fixture named by 'json'.
    @Parameter(1)
    public SObjectDescription description;

    // Maps a generated-source file name to its expected-resource file name.
    @Parameter(4)
    public Function<String, String> fileNameAdapter = Function.identity();

    // Name of the JSON fixture driving this parameterized run.
    @Parameter(0)
    public String json;

    // Mojo under test, pre-configured by the test-case factory.
    @Parameter(3)
    public GenerateMojo mojo;

    // Generated source file names expected for this fixture.
    @Parameter(2)
    public Set<String> sources;

    // Generates sources for 'description' into a temp dir and compares each
    // generated file byte-for-byte against the checked-in expected resource.
    @Test
    public void testProcessDescription(@TempDir File pkgDir) throws Exception {
        final GenerateExecution.GeneratorUtility utility = mojo.generatorUtility();
        final RestClient client = mockRestClient();
        ObjectDescriptions descriptions = new ObjectDescriptions(client, 0, null, null, null, null, LOG);
        // Exercise the picklist-value override path ("A+" -> "APlus").
        mojo.enumerationOverrideProperties.put("Case.PickListValueOverride.A+", "APlus");
        Set<String> sObjectNames = StreamSupport.stream(descriptions.fetched().spliterator(), false)
            .map(SObjectDescription::getName).collect(Collectors.toSet());
        mojo.setup();
        mojo.setDescriptions(descriptions);
        mojo.processDescription(pkgDir, description, utility, sObjectNames);
        for (final String source : sources) {
            final String expected = fileNameAdapter.apply(source);
            final File generatedFile = new File(pkgDir, source);
            final String generatedContent = FileUtils.readFileToString(generatedFile, StandardCharsets.UTF_8);
            final String expectedContent = IOUtils.toString(
                CamelSalesforceMojoOutputTest.class.getResource("/generated/" + expected), StandardCharsets.UTF_8);
            assertEquals(expectedContent, generatedContent,
                "Generated source file in " + source + " must be equal to the one present in test/resources/" + expected);
        }
    }

    // Parameterized matrix: fixture JSON x expected generated sources, plus one
    // run with a custom date->LocalDateTime type mapping.
    @Parameters(name = "json = {0}, source = {2}")
    public static Iterable<Object[]> parameters() throws IOException {
        return Arrays.asList(testCase(TEST_CASE_FILE, "Case.java"),
            testCase(TEST_CASE_FILE, "Case_PickListAccentMarkEnum.java"),
            testCase(TEST_CASE_FILE, "Case_PickListQuotationMarkEnum.java"),
            testCase(TEST_CASE_FILE, "Case_PickListSlashEnum.java"),
            testCase(TEST_CASE_FILE, "Case_PickListValueOverrideEnum.java"),
            testCase(TEST_CASE_FILE, "QueryRecordsCase.java"),
            testCase(TEST_CALCULATED_FORMULA_FILE, "ComplexCalculatedFormula.java"),
            testCase(TEST_CALCULATED_FORMULA_FILE, "QueryRecordsComplexCalculatedFormula.java"),
            testCase("asset.json", "Asset.java"), //
            testCase("asset.json", mojo -> {
                mojo.customTypes = new HashMap<>();
                mojo.customTypes.put("date", "java.time.LocalDateTime");
                mojo.setup();
            }, s -> "Asset_LocalDateTime.java", "Asset.java"), testCase("with_reference.json", "With_Reference__c.java"));
    }

    // Fresh, unconfigured mojo instance for a test case.
    static GenerateMojo createMojo() {
        final GenerateMojo mojo = new GenerateMojo();
        return mojo;
    }

    // Deserializes an SObjectDescription from a classpath JSON resource.
    static SObjectDescription createSObjectDescription(final String name) throws IOException {
        try (InputStream inputStream = CamelSalesforceMojoOutputTest.class.getResourceAsStream("/" + name)) {
            final ObjectMapper mapper = JsonUtils.createObjectMapper();
            return mapper.readValue(inputStream, SObjectDescription.class);
        }
    }

    // Builds a RestClient mock whose describe calls answer from classpath fixtures.
    static RestClient mockRestClient() {
        final RestClient client = mock(RestClient.class);
        doAnswer(provideResource("/global_sobjects.json")).when(client).getGlobalObjects(anyMap(), any(ResponseCallback.class));
        doAnswer(provideResource("/account.json")).when(client).getDescription(eq("Account"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/asset.json")).when(client).getDescription(eq("Asset"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/case.json")).when(client).getDescription(eq("Case"), anyMap(), any(ResponseCallback.class));
        doAnswer(provideResource("/invoice.json")).when(client).getDescription(eq("Invoice__c"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/line_item.json")).when(client).getDescription(eq("Line_Item__c"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/merchandise.json")).when(client).getDescription(eq("Merchandise__c"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/with_reference.json")).when(client).getDescription(eq("With_Reference__c"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/product2.json")).when(client).getDescription(eq("Product2"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/with_external_id.json")).when(client).getDescription(eq("With_External_Id__c"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/group.json")).when(client).getDescription(eq("Group"), anyMap(),
            any(ResponseCallback.class));
        doAnswer(provideResource("/user.json")).when(client).getDescription(eq("User"), anyMap(), any(ResponseCallback.class));
        return client;
    }

    // Mockito Answer that streams a classpath resource into the invocation's
    // ResponseCallback argument.
    static Answer<Void> provideResource(final String resource) {
        return invocation -> {
            final ResponseCallback callback = Arrays.stream(invocation.getArguments())
                .filter(ResponseCallback.class::isInstance).map(ResponseCallback.class::cast).findFirst()
                .get();
            callback.onResponse(CamelSalesforceMojoOutputTest.class.getResourceAsStream(resource), null, null);
            return null;
        };
    }

    // Full test-case factory: fixture name, mojo customization, file-name
    // adapter, and expected generated sources.
    static Object[] testCase(
        final String json, final Consumer<GenerateMojo> mojoConfigurator, final Function<String, String> adapter,
        final String... sources)
        throws IOException {
        final GenerateMojo mojo = createMojo();
        mojoConfigurator.accept(mojo);
        return new Object[] { json, createSObjectDescription(json), new HashSet<>(Arrays.asList(sources)), mojo, adapter };
    }

    // Overload: identity file-name adapter.
    static Object[] testCase(final String json, final Consumer<GenerateMojo> mojoConfigurator, final String... sources)
        throws IOException {
        return testCase(json, mojoConfigurator, Function.identity(), sources);
    }

    // Overload: no mojo customization (String::valueOf is a no-op consumer here).
    static Object[] testCase(final String json, final String... sources) throws IOException {
        return testCase(json, String::valueOf, sources);
    }
}
|
CamelSalesforceMojoOutputTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cut/generic/GenericCompositeUserTypeEntity.java
|
{
"start": 435,
"end": 997
}
|
class ____ {

    // Composite (embedded) value mapped through a custom CompositeType.
    // Both columns are non-updatable: TYPE discriminates the enum class,
    // DATA holds the JSON payload in a CLOB.
    @Embedded
    @CompositeType(value = EnumPlaceholderUserType.class)
    @AttributeOverrides({
            @AttributeOverride(name = "type", column = @Column(name = "TYPE", updatable = false)),
            @AttributeOverride(name = "jsonValue", column = @Column(name = "DATA", updatable = false, columnDefinition = "clob"))
    })
    protected EnumPlaceholder placeholder;

    // NOTE(review): millisecond-timestamp ids can collide if two instances are
    // created in the same millisecond — presumably acceptable for this test entity.
    @Id
    private final Long id;

    public GenericCompositeUserTypeEntity(EnumPlaceholder placeholder) {
        this.id = System.currentTimeMillis();
        this.placeholder = placeholder;
    }
}
|
GenericCompositeUserTypeEntity
|
java
|
processing__processing4
|
core/src/processing/opengl/FontTexture.java
|
{
"start": 10728,
"end": 11922
}
|
class ____ {
    // Index into the enclosing font's textures[] array backing this glyph.
    int texIndex;
    // Cached dimensions (glWidth/glHeight) of the backing texture; refreshed by updateUV().
    int width;
    int height;
    // Crop rectangle {x, y, w, h}; note h (crop[3]) is stored negative (y flip).
    int[] crop;
    // Normalized texture coordinates of the glyph region, derived from crop.
    float u0, u1;
    float v0, v1;
    // Raw glyph pixels; written into the texture by updateTex().
    int[] pixels;

    TextureInfo(int tidx, int cropX, int cropY, int cropW, int cropH,
                int[] pix) {
        texIndex = tidx;
        crop = new int[4];
        // The region of the texture corresponding to the glyph is surrounded by a
        // 1-pixel wide border to avoid artifacts due to bilinear sampling. This
        // is why the additions and subtractions to the crop values.
        crop[0] = cropX + 1;
        crop[1] = cropY + 1 + cropH - 2;
        crop[2] = cropW - 2;
        crop[3] = -cropH + 2;
        pixels = pix;
        updateUV();
        updateTex();
    }

    // Recomputes u/v coordinates from the crop rectangle and the current size
    // of the backing texture (which may have been resized/reallocated).
    void updateUV() {
        width = textures[texIndex].glWidth;
        height = textures[texIndex].glHeight;
        u0 = (float)crop[0] / (float)width;
        u1 = u0 + (float)crop[2] / (float)width;
        v0 = (float)(crop[1] + crop[3]) / (float)height;
        v1 = v0 - (float)crop[3] / (float)height;
    }

    // Uploads the glyph pixels into the backing texture, expanding the target
    // rectangle by the 1-pixel sampling border on every side.
    void updateTex() {
        textures[texIndex].setNative(pixels, crop[0] - 1, crop[1] + crop[3] - 1,
            crop[2] + 2, -crop[3] + 2);
    }
}
}
|
TextureInfo
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/CompositeApiManualIT.java
|
{
"start": 2361,
"end": 11242
}
|
class ____ extends AbstractQueryRecordsBase<Account> {
}
private static final Set<String> VERSIONS = new HashSet<>(Arrays.asList("38.0", SalesforceEndpointConfig.DEFAULT_VERSION));
@Parameter
private String format;
@Parameter(1)
private String version;
private String accountId;
private String compositeUri;
@AfterEach
public void removeRecords() {
try {
template.sendBody("salesforce:deleteSObject?sObjectName=Account&sObjectId=" + accountId, null);
} catch (final CamelExecutionException ignored) {
// other tests run in parallel could have deleted the Account
}
template.request("direct:deleteBatchAccounts", null);
}
@BeforeEach
public void setupRecords() {
compositeUri = "salesforce:composite?format=" + format;
final Account account = new Account();
account.setName("Composite API Batch");
final CreateSObjectResult result = template.requestBody("salesforce:createSObject", account, CreateSObjectResult.class);
accountId = result.getId();
}
@Test
public void shouldSubmitBatchUsingCompositeApi() {
final SObjectComposite composite = new SObjectComposite(version, true);
final Account updates = new Account();
updates.setName("NewName");
composite.addUpdate("Account", accountId, updates, "UpdateExistingAccountReferenceId");
final Account newAccount = new Account();
newAccount.setName("Account created from Composite batch API");
composite.addCreate(newAccount, "CreateAccountReferenceId");
composite.addGet("Account", accountId, "GetAccountReferenceId", "Name", "BillingPostalCode");
composite.addDelete("Account", accountId, "DeleteAccountReferenceId");
testComposite(composite);
}
@Test
public void shouldSupportGenericCompositeRequests() {
final SObjectComposite composite = new SObjectComposite(version, true);
composite.addGeneric(Method.GET, "/sobjects/Account/" + accountId, "GetExistingAccountReferenceId");
testComposite(composite);
}
@Test
public void shouldSupportObjectCreation() {
final SObjectComposite composite = new SObjectComposite(version, true);
final Account newAccount = new Account();
newAccount.setName("Account created from Composite batch API");
composite.addCreate(newAccount, "CreateAccountReferenceId");
final SObjectCompositeResponse response = testComposite(composite);
assertResponseContains(response, "id");
}
@Test
public void shouldSupportObjectDeletion() {
final SObjectComposite composite = new SObjectComposite(version, true);
composite.addDelete("Account", accountId, "DeleteAccountReferenceId");
testComposite(composite);
}
@Test
public void shouldSupportObjectRetrieval() {
final SObjectComposite composite = new SObjectComposite(version, true);
composite.addGet("Account", accountId, "GetExistingAccountReferenceId", "Name");
final SObjectCompositeResponse response = testComposite(composite);
assertResponseContains(response, "Name");
}
@Test
public void shouldSupportObjectUpdates() {
final SObjectComposite composite = new SObjectComposite(version, true);
final Account updates = new Account();
updates.setName("NewName");
updates.setAccountNumber("AC12345");
composite.addUpdate("Account", accountId, updates, "UpdateAccountReferenceId");
testComposite(composite);
}
@Test
public void shouldSupportObjectUpserts() {
final SObjectComposite composite = new SObjectComposite(version, true);
final Line_Item__c li = new Line_Item__c();
composite.addUpsertByExternalId("Line_Item__c", "Name", "AC12345", li,
"UpsertLineItemReferenceId");
testComposite(composite);
}
@Test
public void shouldSupportRaw() throws Exception {
final String rawComposite = "{\n" +
" \"allOrNone\" : true,\n" +
" \"compositeRequest\" : [{\n" +
" \"method\": \"GET\",\n" +
" \"url\": \"/services/data/v" + version
+ "/query/?q=SELECT+Id+FROM+Contact+LIMIT+1\",\n" +
" \"referenceId\": \"contacts\"\n" +
" }]\n" +
"}\n";
final String response = testRawComposite(rawComposite);
ObjectMapper objectMapper = new ObjectMapper();
SObjectCompositeResponse sObjectCompositeResponse = objectMapper.readValue(
response, SObjectCompositeResponse.class);
assertResponseContains(sObjectCompositeResponse, "done");
}
@Test
public void shouldSupportQuery() {
final SObjectComposite composite = new SObjectComposite(version, true);
composite.addQuery("SELECT Id, Name FROM Account", "SelectQueryReferenceId");
final SObjectCompositeResponse response = testComposite(composite);
assertResponseContains(response, "totalSize");
}
@Test
public void shouldSupportQueryAll() {
final SObjectComposite composite = new SObjectComposite(version, true);
composite.addQueryAll("SELECT Id, Name FROM Account", "SelectQueryReferenceId");
final SObjectCompositeResponse response = testComposite(composite);
assertResponseContains(response, "totalSize");
}
@Test
public void shouldSupportRelatedObjectRetrieval() {
assumeFalse(Version.create(version).compareTo(Version.create("36.0")) < 0,
"Version must be greater than or equal to 36.0");
final SObjectComposite composite = new SObjectComposite("36.0", true);
composite.addGetRelated("Account", accountId, "CreatedBy", "GetRelatedAccountReferenceId");
final SObjectCompositeResponse response = testComposite(composite);
assertResponseContains(response, "Username");
}
SObjectCompositeResponse testComposite(final SObjectComposite batch) {
final SObjectCompositeResponse response = template.requestBody(compositeUri, batch, SObjectCompositeResponse.class);
Assertions.assertThat(response).as("Response should be provided").isNotNull();
Assertions.assertThat(response.getCompositeResponse()).as("Received errors in: " + response)
.allMatch(val -> val.getHttpStatusCode() >= 200 && val.getHttpStatusCode() <= 299);
return response;
}
String testRawComposite(final String rawComposite) {
final String rawCompositeUri = "salesforce:composite?rawPayload=true";
final String response = template.requestBody(rawCompositeUri, rawComposite, String.class);
Assertions.assertThat(response).as("Response should be provided").isNotNull();
return response;
}
@Override
protected RouteBuilder doCreateRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:deleteBatchAccounts")
.to("salesforce:query?sObjectClass=" + Accounts.class.getName()
+ "&sObjectQuery=SELECT Id FROM Account WHERE Name = 'Account created from Composite batch API'")
.split(simple("${body.records}")).setHeader("sObjectId", simple("${body.id}"))
.to("salesforce:deleteSObject?sObjectName=Account").end();
}
};
}
@Parameters(name = "format = {0}, version = {1}")
public static Iterable<Object[]> formats() {
return VERSIONS.stream().map(v -> new Object[] { "JSON", v }).collect(Collectors.toList());
}
static void assertResponseContains(final SObjectCompositeResponse response, final String key) {
Assertions.assertThat(response).isNotNull();
final List<SObjectCompositeResult> compositeResponse = response.getCompositeResponse();
Assertions.assertThat(compositeResponse).hasSize(1);
final SObjectCompositeResult firstCompositeResponse = compositeResponse.get(0);
Assertions.assertThat(firstCompositeResponse).isNotNull();
final Object firstCompositeResponseBody = firstCompositeResponse.getBody();
Assertions.assertThat(firstCompositeResponseBody).isInstanceOf(Map.class);
@SuppressWarnings("unchecked")
final Map<String, ?> body = (Map<String, ?>) firstCompositeResponseBody;
Assertions.assertThat(body).containsKey(key);
Assertions.assertThat(body.get(key)).isNotNull();
}
}
|
Accounts
|
java
|
apache__camel
|
components/camel-jcache/src/test/java/org/apache/camel/component/jcache/JCacheManagerTest.java
|
{
"start": 1014,
"end": 1866
}
|
class ____ extends JCacheComponentTestSupport {
@Test
public void testCacheCreation() throws Exception {
JCacheConfiguration conf = new JCacheConfiguration();
conf.setCacheName(randomString());
JCacheManager<Object, Object> manager = new JCacheManager<>(conf);
assertNotNull(manager.getCache());
manager.close();
}
@Test
public void testCacheCreationFailure() {
JCacheConfiguration conf = new JCacheConfiguration();
conf.setCacheName(randomString());
conf.setCreateCacheIfNotExists(false);
final JCacheManager<Object, Object> objectObjectJCacheManager = new JCacheManager<>(conf);
assertThrows(IllegalStateException.class,
() -> {
objectObjectJCacheManager.getCache();
});
}
}
|
JCacheManagerTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/connector/source/abilities/SupportsLookupCustomShuffle.java
|
{
"start": 1156,
"end": 1522
}
|
interface ____ designed to allow connectors to provide a custom partitioning strategy for the
* data that is fed into the {@link LookupTableSource}. This enables the Flink Planner to optimize
* the distribution of input stream across different subtasks of lookup-join node to match the
* distribution of data in the external data source.
*/
@PublicEvolving
public
|
is
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/support/TestPropertySourceUtilsTests.java
|
{
"start": 3624,
"end": 13658
}
|
class ____ resource")
.withMessageContaining("does not exist")
.withMessageContaining("ExtendedEmptyPropertySources.properties");
}
@Test
void repeatedTestPropertySourcesWithConflictingInheritLocationsFlags() {
assertThatIllegalArgumentException()
.isThrownBy(() -> buildMergedTestPropertySources(RepeatedPropertySourcesWithConflictingInheritLocationsFlags.class))
.withMessage("@TestPropertySource on RepeatedPropertySourcesWithConflictingInheritLocationsFlags and " +
"@InheritLocationsFalseTestProperty on RepeatedPropertySourcesWithConflictingInheritLocationsFlags " +
"must declare the same value for 'inheritLocations' as other directly present or meta-present @TestPropertySource annotations");
}
@Test
void repeatedTestPropertySourcesWithConflictingInheritPropertiesFlags() {
assertThatIllegalArgumentException()
.isThrownBy(() -> buildMergedTestPropertySources(RepeatedPropertySourcesWithConflictingInheritPropertiesFlags.class))
.withMessage("@TestPropertySource on RepeatedPropertySourcesWithConflictingInheritPropertiesFlags and " +
"@InheritPropertiesFalseTestProperty on RepeatedPropertySourcesWithConflictingInheritPropertiesFlags " +
"must declare the same value for 'inheritProperties' as other directly present or meta-present @TestPropertySource annotations");
}
@Test
void value() {
assertMergedTestPropertySources(ValuePropertySources.class, asArray("classpath:/value.xml"),
EMPTY_STRING_ARRAY);
}
@Test
void locationsAndValueAttributes() {
assertThatExceptionOfType(AnnotationConfigurationException.class)
.isThrownBy(() -> buildMergedTestPropertySources(LocationsAndValuePropertySources.class));
}
@Test
void locationsAndProperties() {
assertMergedTestPropertySources(LocationsAndPropertiesPropertySources.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml"), asArray("k1a=v1a", "k1b: v1b"));
}
@Test
void inheritedLocationsAndProperties() {
assertMergedTestPropertySources(InheritedPropertySources.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml"), asArray("k1a=v1a", "k1b: v1b"));
}
/**
* @since 5.3
*/
@Test
void locationsAndPropertiesDuplicatedLocally() {
assertMergedTestPropertySources(LocallyDuplicatedLocationsAndProperties.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml"), asArray("k1a=v1a", "k1b: v1b"));
}
/**
* @since 5.3
*/
@Test
void locationsAndPropertiesDuplicatedOnSuperclass() {
assertMergedTestPropertySources(DuplicatedLocationsAndPropertiesPropertySources.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml"), asArray("k1a=v1a", "k1b: v1b"));
}
/**
* @since 5.3
*/
@Test
void locationsAndPropertiesDuplicatedOnEnclosingClass() {
assertMergedTestPropertySources(LocationsAndPropertiesPropertySources.Nested.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml"), asArray("k1a=v1a", "k1b: v1b"));
}
@Test
void extendedLocationsAndProperties() {
assertMergedTestPropertySources(ExtendedPropertySources.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml", "classpath:/bar1.xml", "classpath:/bar2.xml"),
asArray("k1a=v1a", "k1b: v1b", "k2a v2a", "k2b: v2b"));
}
@Test
void overriddenLocations() {
assertMergedTestPropertySources(OverriddenLocationsPropertySources.class,
asArray("classpath:/baz.properties"), asArray("k1a=v1a", "k1b: v1b", "key = value"));
}
@Test
void overriddenProperties() {
assertMergedTestPropertySources(OverriddenPropertiesPropertySources.class,
asArray("classpath:/foo1.xml", "classpath:/foo2.xml", "classpath:/baz.properties"), KEY_VALUE_PAIR);
}
@Test
void overriddenLocationsAndProperties() {
assertMergedTestPropertySources(OverriddenLocationsAndPropertiesPropertySources.class,
asArray("classpath:/baz.properties"), KEY_VALUE_PAIR);
}
@Test
void addPropertiesFilesToEnvironmentWithNullContext() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addPropertiesFilesToEnvironment(null, FOO_LOCATIONS))
.withMessageContaining("'context' must not be null");
}
@Test
void addPropertiesFilesToEnvironmentWithContextAndNullLocations() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addPropertiesFilesToEnvironment(mock(ConfigurableApplicationContext.class), (String[]) null))
.withMessageContaining("'locations' must not be null");
}
@Test
void addPropertiesFilesToEnvironmentWithNullEnvironment() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addPropertiesFilesToEnvironment(null, mock(), FOO_LOCATIONS))
.withMessageContaining("'environment' must not be null");
}
@Test
void addPropertiesFilesToEnvironmentWithEnvironmentLocationsAndNullResourceLoader() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addPropertiesFilesToEnvironment(new MockEnvironment(), null, FOO_LOCATIONS))
.withMessageContaining("'resourceLoader' must not be null");
}
@Test
void addPropertiesFilesToEnvironmentWithEnvironmentAndNullLocations() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addPropertiesFilesToEnvironment(new MockEnvironment(), mock(), (String[]) null))
.withMessageContaining("'locations' must not be null");
}
@Test
void addPropertiesFilesToEnvironmentWithSinglePropertyFromVirtualFile() {
ConfigurableEnvironment environment = new MockEnvironment();
MutablePropertySources propertySources = environment.getPropertySources();
propertySources.remove(MockPropertySource.MOCK_PROPERTIES_PROPERTY_SOURCE_NAME);
assertThat(propertySources).isEmpty();
String pair = "key = value";
ByteArrayResource resource = new ByteArrayResource(pair.getBytes(), "from inlined property: " + pair);
ResourceLoader resourceLoader = mock();
given(resourceLoader.getResource(anyString())).willReturn(resource);
addPropertiesFilesToEnvironment(environment, resourceLoader, FOO_LOCATIONS);
assertThat(propertySources).hasSize(1);
assertThat(environment.getProperty("key")).isEqualTo("value");
}
@Test
void addInlinedPropertiesToEnvironmentWithNullContext() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addInlinedPropertiesToEnvironment((ConfigurableApplicationContext) null, KEY_VALUE_PAIR))
.withMessageContaining("'context' must not be null");
}
@Test
void addInlinedPropertiesToEnvironmentWithContextAndNullInlinedProperties() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addInlinedPropertiesToEnvironment(mock(ConfigurableApplicationContext.class), (String[]) null))
.withMessageContaining("'inlinedProperties' must not be null");
}
@Test
void addInlinedPropertiesToEnvironmentWithNullEnvironment() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addInlinedPropertiesToEnvironment((ConfigurableEnvironment) null, KEY_VALUE_PAIR))
.withMessageContaining("'environment' must not be null");
}
@Test
void addInlinedPropertiesToEnvironmentWithEnvironmentAndNullInlinedProperties() {
assertThatIllegalArgumentException()
.isThrownBy(() -> addInlinedPropertiesToEnvironment(new MockEnvironment(), (String[]) null))
.withMessageContaining("'inlinedProperties' must not be null");
}
@Test
void addInlinedPropertiesToEnvironmentWithMalformedUnicodeInValue() {
String properties = "key = \\uZZZZ";
assertThatIllegalStateException()
.isThrownBy(() -> addInlinedPropertiesToEnvironment(new MockEnvironment(), properties))
.withMessageContaining("Failed to load test environment properties from [%s]", properties);
}
@Test
void addInlinedPropertiesToEnvironmentWithMultipleKeyValuePairsInSingleInlinedProperty() {
ConfigurableEnvironment environment = new MockEnvironment();
MutablePropertySources propertySources = environment.getPropertySources();
propertySources.remove(MockPropertySource.MOCK_PROPERTIES_PROPERTY_SOURCE_NAME);
assertThat(propertySources).isEmpty();
addInlinedPropertiesToEnvironment(environment, """
a=b
x=y
""");
assertThat(propertySources).hasSize(1);
PropertySource<?> propertySource = propertySources.get(INLINED_PROPERTIES_PROPERTY_SOURCE_NAME);
assertThat(propertySource).isInstanceOf(MapPropertySource.class);
assertThat(((MapPropertySource) propertySource).getSource()).containsExactly(entry("a", "b"), entry("x", "y"));
}
@Test
void addInlinedPropertiesToEnvironmentWithEmptyProperty() {
ConfigurableEnvironment environment = new MockEnvironment();
MutablePropertySources propertySources = environment.getPropertySources();
propertySources.remove(MockPropertySource.MOCK_PROPERTIES_PROPERTY_SOURCE_NAME);
assertThat(propertySources).isEmpty();
addInlinedPropertiesToEnvironment(environment, " ");
assertThat(propertySources).hasSize(1);
PropertySource<?> propertySource = propertySources.get(INLINED_PROPERTIES_PROPERTY_SOURCE_NAME);
assertThat(propertySource).isInstanceOfSatisfying(MapPropertySource.class,
mps -> assertThat(mps.getSource()).isEmpty());
}
@Test
void convertInlinedPropertiesToMapWithNullInlinedProperties() {
assertThatIllegalArgumentException()
.isThrownBy(() -> convertInlinedPropertiesToMap((String[]) null))
.withMessageContaining("'inlinedProperties' must not be null");
}
private static void assertMergedTestPropertySources(Class<?> testClass, String[] expectedLocations,
String[] expectedProperties) {
MergedTestPropertySources mergedPropertySources = buildMergedTestPropertySources(testClass);
SoftAssertions.assertSoftly(softly -> {
softly.assertThat(mergedPropertySources).isNotNull();
Stream<String> locations = mergedPropertySources.getPropertySourceDescriptors().stream()
.map(PropertySourceDescriptor::locations).flatMap(List::stream);
softly.assertThat(locations).containsExactly(expectedLocations);
softly.assertThat(mergedPropertySources.getProperties()).isEqualTo(expectedProperties);
});
}
@SafeVarargs
private static <T> T[] asArray(T... arr) {
return arr;
}
@Retention(RetentionPolicy.RUNTIME)
@TestPropertySource(locations = "foo.properties", inheritLocations = false)
@
|
path
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/sql/internal/SqmMapEntryResult.java
|
{
"start": 1978,
"end": 3244
}
|
class ____<K, V, R> implements DomainResultAssembler<R> {
private final JavaType<R> javaType;
private final DomainResultAssembler<K> keyAssembler;
private final DomainResultAssembler<V> valueAssembler;
public EntryDomainResultAssembler(
JavaType<R> javaType, DomainResultAssembler<K> keyAssembler,
DomainResultAssembler<V> valueAssembler) {
this.javaType = javaType;
this.keyAssembler = keyAssembler;
this.valueAssembler = valueAssembler;
}
@Override
public R assemble(RowProcessingState rowProcessingState) {
final K key = keyAssembler.assemble( rowProcessingState );
final V value = valueAssembler.assemble( rowProcessingState );
//noinspection unchecked
return (R) Map.entry( key, value );
}
@Override
public JavaType<R> getAssembledJavaType() {
return javaType;
}
@Override
public <X> void forEachResultAssembler(BiConsumer<Initializer<?>, X> consumer, X arg) {
keyAssembler.forEachResultAssembler( consumer, arg );
valueAssembler.forEachResultAssembler( consumer, arg );
}
@Override
public void resolveState(RowProcessingState rowProcessingState) {
keyAssembler.resolveState( rowProcessingState );
valueAssembler.resolveState( rowProcessingState );
}
}
}
|
EntryDomainResultAssembler
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/clientproxy/finalmethod/FinalMethodIllegalWhenInjectedTest.java
|
{
"start": 575,
"end": 1164
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(Moo.class, MooConsumer.class)
.strictCompatibility(true)
.shouldFail()
.build();
@Test
public void trigger() {
Throwable error = container.getFailure();
assertNotNull(error);
assertInstanceOf(DeploymentException.class, error);
assertTrue(error.getMessage().contains("must not declare non-static final methods"));
}
@ApplicationScoped
static
|
FinalMethodIllegalWhenInjectedTest
|
java
|
google__dagger
|
javatests/dagger/functional/modules/ModuleIncludesTest.java
|
{
"start": 967,
"end": 1040
}
|
class ____ {
@Component(modules = PublicModule.class)
|
ModuleIncludesTest
|
java
|
apache__rocketmq
|
auth/src/main/java/org/apache/rocketmq/auth/authentication/enums/UserType.java
|
{
"start": 962,
"end": 1621
}
|
enum ____ {
SUPER((byte) 1, "Super"),
NORMAL((byte) 2, "Normal");
@JSONField(value = true)
private final byte code;
private final String name;
UserType(byte code, String name) {
this.code = code;
this.name = name;
}
public static UserType getByName(String name) {
for (UserType subjectType : UserType.values()) {
if (StringUtils.equalsIgnoreCase(subjectType.getName(), name)) {
return subjectType;
}
}
return null;
}
public byte getCode() {
return code;
}
public String getName() {
return name;
}
}
|
UserType
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/concurrent/InboundMessageQueueSpScTest.java
|
{
"start": 535,
"end": 719
}
|
class ____ extends InboundMessageQueueTest {
@Override
protected Context createContext(VertxInternal vertx) {
return vertx.createWorkerContext();
}
}
|
InboundMessageQueueSpScTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/vertx/OpenApiRoute.java
|
{
"start": 377,
"end": 681
}
|
class ____ {
@Route(path = "/", methods = HttpMethod.GET)
public String root() {
return "resource";
}
@Route(path = "/test-enums", methods = HttpMethod.GET)
public Query testEnums(@Param("query") String query) {
return Query.QUERY_PARAM_1;
}
public
|
OpenApiRoute
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/Functions.java
|
{
"start": 3224,
"end": 3646
}
|
interface ____<O1, O2, T extends Throwable> {
/**
* Accepts the consumer.
*
* @param object1 the first parameter for the consumable to accept
* @param object2 the second parameter for the consumable to accept
* @throws T Thrown when the consumer fails.
*/
void accept(O1 object1, O2 object2) throws T;
}
/**
* A functional
|
FailableBiConsumer
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/RestParameterException.java
|
{
"start": 858,
"end": 1497
}
|
class ____ extends RestException {
private static final long serialVersionUID = 1L;
public RestParameterException(String message) {
super(message);
}
public RestParameterException(Throwable cause) {
super(cause);
}
public RestParameterException(String message, Throwable cause) {
super(message, cause);
}
public RestParameterException(Messages message, Object... arguments) {
super(message, arguments);
}
public RestParameterException(Throwable cause, Messages message, Object... arguments) {
super(cause, message, arguments);
}
}
|
RestParameterException
|
java
|
apache__flink
|
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/windowing/assigners/EventTimeSessionWindows.java
|
{
"start": 1871,
"end": 4272
}
|
class ____ extends MergingWindowAssigner<Object, TimeWindow> {
private static final long serialVersionUID = 1L;
protected long sessionTimeout;
protected EventTimeSessionWindows(long sessionTimeout) {
if (sessionTimeout <= 0) {
throw new IllegalArgumentException(
"EventTimeSessionWindows parameters must satisfy 0 < size");
}
this.sessionTimeout = sessionTimeout;
}
@Override
public Collection<TimeWindow> assignWindows(
Object element, long timestamp, WindowAssignerContext context) {
return Collections.singletonList(new TimeWindow(timestamp, timestamp + sessionTimeout));
}
@Override
public Trigger<Object, TimeWindow> getDefaultTrigger() {
return EventTimeTrigger.create();
}
@Override
public String toString() {
return "EventTimeSessionWindows(" + sessionTimeout + ")";
}
/**
* Creates a new {@code SessionWindows} {@link WindowAssigner} that assigns elements to sessions
* based on the element timestamp.
*
* @param size The session timeout, i.e. the time gap between sessions
* @return The policy.
*/
public static EventTimeSessionWindows withGap(Duration size) {
return new EventTimeSessionWindows(size.toMillis());
}
/**
* Creates a new {@code SessionWindows} {@link WindowAssigner} that assigns elements to sessions
* based on the element timestamp.
*
* @param sessionWindowTimeGapExtractor The extractor to use to extract the time gap from the
* input elements
* @return The policy.
*/
@PublicEvolving
public static <T> DynamicEventTimeSessionWindows<T> withDynamicGap(
SessionWindowTimeGapExtractor<T> sessionWindowTimeGapExtractor) {
return new DynamicEventTimeSessionWindows<>(sessionWindowTimeGapExtractor);
}
@Override
public TypeSerializer<TimeWindow> getWindowSerializer(ExecutionConfig executionConfig) {
return new TimeWindow.Serializer();
}
@Override
public boolean isEventTime() {
return true;
}
/** Merge overlapping {@link TimeWindow}s. */
@Override
public void mergeWindows(
Collection<TimeWindow> windows, MergingWindowAssigner.MergeCallback<TimeWindow> c) {
TimeWindow.mergeWindows(windows, c);
}
}
|
EventTimeSessionWindows
|
java
|
apache__spark
|
launcher/src/main/java/org/apache/spark/launcher/SparkAppHandle.java
|
{
"start": 1288,
"end": 3823
}
|
enum ____ {
/** The application has not reported back yet. */
UNKNOWN(false),
/** The application has connected to the handle. */
CONNECTED(false),
/** The application has been submitted to the cluster. */
SUBMITTED(false),
/** The application is running. */
RUNNING(false),
/** The application finished with a successful status. */
FINISHED(true),
/** The application finished with a failed status. */
FAILED(true),
/** The application was killed. */
KILLED(true),
/** The Spark Submit JVM exited with a unknown status. */
LOST(true);
private final boolean isFinal;
State(boolean isFinal) {
this.isFinal = isFinal;
}
/**
* Whether this state is a final state, meaning the application is not running anymore
* once it's reached.
*/
public boolean isFinal() {
return isFinal;
}
}
/**
* Adds a listener to be notified of changes to the handle's information. Listeners will be called
* from the thread processing updates from the application, so they should avoid blocking or
* long-running operations.
*
* @param l Listener to add.
*/
void addListener(Listener l);
/** Returns the current application state. */
State getState();
/** Returns the application ID, or <code>null</code> if not yet known. */
String getAppId();
/**
* Asks the application to stop. This is best-effort, since the application may fail to receive
* or act on the command. Callers should watch for a state transition that indicates the
* application has really stopped.
*/
void stop();
/**
* Tries to kill the underlying application. Implies {@link #disconnect()}. This will not send
* a {@link #stop()} message to the application, so it's recommended that users first try to
* stop the application cleanly and only resort to this method if that fails.
*/
void kill();
/**
* Disconnects the handle from the application, without stopping it. After this method is called,
* the handle will not be able to communicate with the application anymore.
*/
void disconnect();
/**
* If the application failed due to an error, return the underlying error. If the app
* succeeded, this method returns an empty {@link Optional}.
*/
Optional<Throwable> getError();
/**
* Listener for updates to a handle's state. The callbacks do not receive information about
* what exactly has changed, just that an update has occurred.
*
* @since 1.6.0
*/
|
State
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/layout/JacksonFactory.java
|
{
"start": 5368,
"end": 7436
}
|
class ____ extends JacksonFactory {
private final boolean includeStacktrace;
private final boolean stacktraceAsString;
public YAML(final boolean includeStacktrace, final boolean stacktraceAsString) {
this.includeStacktrace = includeStacktrace;
this.stacktraceAsString = stacktraceAsString;
}
@Override
protected String getPropertyNameForTimeMillis() {
return JsonConstants.ELT_TIME_MILLIS;
}
@Override
protected String getPropertyNameForInstant() {
return JsonConstants.ELT_INSTANT;
}
@Override
protected String getPropertNameForContextMap() {
return JsonConstants.ELT_CONTEXT_MAP;
}
@Override
protected String getPropertNameForSource() {
return JsonConstants.ELT_SOURCE;
}
@Override
protected String getPropertNameForNanoTime() {
return JsonConstants.ELT_NANO_TIME;
}
@Override
protected PrettyPrinter newCompactPrinter() {
return new MinimalPrettyPrinter();
}
@Override
protected ObjectMapper newObjectMapper() {
return new Log4jYamlObjectMapper(false, includeStacktrace, stacktraceAsString);
}
@Override
protected PrettyPrinter newPrettyPrinter() {
return new DefaultPrettyPrinter();
}
}
/**
* When <Event>s are written into a XML file; the "Event" object is not the root element, but an element named
* <Events> created using {@link XmlLayout#getHeader()} and {@link XmlLayout#getFooter()} methods.
* <p>
* {@link com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter} is used to print the Event object into
* XML; hence it assumes <Event> tag as the root element, so it prints the <Event> tag without any
* indentation. To add an indentation to the <Event> tag; hence an additional indentation for any
* sub-elements, this
|
YAML
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoAnnotationErrorsTest.java
|
{
"start": 2470,
"end": 3322
}
|
class ____ {",
" @AutoAnnotation static String newString(int value) {",
" return new AutoAnnotation_Test_newString(value);",
" }",
"}");
Compilation compilation =
javac().withProcessors(new AutoAnnotationProcessor()).compile(TEST_ANNOTATION, testSource);
assertThat(compilation)
.hadErrorContaining("must be an annotation type, not java.lang.String")
.inFile(testSource)
.onLineContaining("static String newString(int value)");
}
@Test
public void testOverload() {
JavaFileObject testSource =
JavaFileObjects.forSourceLines(
"com.foo.Test",
"package com.foo;",
"",
"import com.example.TestAnnotation;",
"import com.google.auto.value.AutoAnnotation;",
"",
"
|
Test
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/reflect/PublicSubBean.java
|
{
"start": 1185,
"end": 1594
}
|
class ____ extends PackageBean {
/**
* A directly implemented property.
*/
private String foo = "This is foo";
/**
* Package private constructor, can only use factory method to create beans.
*/
public PublicSubBean() {
}
public String getFoo() {
return this.foo;
}
public void setFoo(final String foo) {
this.foo = foo;
}
}
|
PublicSubBean
|
java
|
google__guice
|
core/test/com/google/inject/BindingTest.java
|
{
"start": 5992,
"end": 6055
}
|
class ____ {
public PublicNoArg() { }
}
static
|
PublicNoArg
|
java
|
elastic__elasticsearch
|
x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/ClusterAlertHttpResource.java
|
{
"start": 1403,
"end": 7770
}
|
class ____ extends PublishableHttpResource {
private static final Logger logger = LogManager.getLogger(ClusterAlertHttpResource.class);
/**
* Use this to retrieve the version of Cluster Alert in the Watch's JSON response from a request.
*/
public static final Map<String, String> CLUSTER_ALERT_VERSION_PARAMETERS = Collections.singletonMap(
"filter_path",
"metadata.xpack.version_created"
);
/**
* License State is used to determine if we should even be add or delete our watches.
*/
private final XPackLicenseState licenseState;
/**
* The name of the Watch that is sent to the remote cluster.
*/
private final Supplier<String> watchId;
/**
* Provides a fully formed Watch (e.g., no variables that need replaced). If {@code null}, then we are always going to delete this
* Cluster Alert.
*/
@Nullable
private final Supplier<String> watch;
/**
* Create a new {@link ClusterAlertHttpResource}.
*
* @param resourceOwnerName The user-recognizable name.
* @param watchId The name of the watch, which is lazily loaded.
* @param watch The watch provider. {@code null} indicates that we should always delete this Watch.
*/
public ClusterAlertHttpResource(
final String resourceOwnerName,
final XPackLicenseState licenseState,
final Supplier<String> watchId,
@Nullable final Supplier<String> watch
) {
// Watcher does not support master_timeout
super(resourceOwnerName, null, CLUSTER_ALERT_VERSION_PARAMETERS);
this.licenseState = Objects.requireNonNull(licenseState);
this.watchId = Objects.requireNonNull(watchId);
this.watch = watch;
}
/**
* Determine if the current {@linkplain #watchId Watch} exists.
*/
@Override
protected void doCheck(final RestClient client, final ActionListener<Boolean> listener) {
// if we should be adding, then we need to check for existence
if (isWatchDefined() && Monitoring.MONITORING_CLUSTER_ALERTS_FEATURE.check(licenseState)) {
final CheckedFunction<Response, Boolean, IOException> watchChecker = (response) -> shouldReplaceClusterAlert(
response,
XContentType.JSON.xContent(),
LAST_UPDATED_VERSION
);
checkForResource(
client,
listener,
logger,
"/_watcher/watch",
watchId.get(),
"monitoring cluster alert",
resourceOwnerName,
"monitoring cluster",
GET_EXISTS,
GET_DOES_NOT_EXIST,
watchChecker,
this::alwaysReplaceResource
);
} else {
// if we should be deleting, then just try to delete it (same level of effort as checking)
deleteResource(
client,
listener,
logger,
"/_watcher/watch",
watchId.get(),
"monitoring cluster alert",
resourceOwnerName,
"monitoring cluster"
);
}
}
/**
* Publish the missing {@linkplain #watchId Watch}.
*/
@Override
protected void doPublish(final RestClient client, final ActionListener<ResourcePublishResult> listener) {
putResource(
client,
listener,
logger,
"/_watcher/watch",
watchId.get(),
Collections.emptyMap(),
this::watchToHttpEntity,
"monitoring cluster alert",
resourceOwnerName,
"monitoring cluster"
);
}
/**
* Determine if the {@link #watch} is defined. If not, then we should always delete the watch.
*
* @return {@code true} if {@link #watch} is defined (non-{@code null}). Otherwise {@code false}.
*/
boolean isWatchDefined() {
return watch != null;
}
/**
* Create a {@link HttpEntity} for the {@link #watch}.
*
* @return Never {@code null}.
*/
HttpEntity watchToHttpEntity() {
return new StringEntity(watch.get(), ContentType.APPLICATION_JSON);
}
/**
* Determine if the {@code response} contains a Watch whose value
*
* <p>
* This expects a response like:
* <pre><code>
* {
* "metadata": {
* "xpack": {
* "version": 6000002
* }
* }
* }
* </code></pre>
*
* @param response The filtered response from the Get Watcher API
* @param xContent The XContent parser to use
* @param minimumVersion The minimum version allowed without being replaced (expected to be the last updated version).
* @return {@code true} represents that it should be replaced. {@code false} that it should be left alone.
* @throws IOException if any issue occurs while parsing the {@code xContent} {@code response}.
* @throws RuntimeException if the response format is changed.
*/
boolean shouldReplaceClusterAlert(final Response response, final XContent xContent, final int minimumVersion) throws IOException {
// no named content used; so EMPTY is fine
final Map<String, Object> resources = XContentHelper.convertToMap(xContent, response.getEntity().getContent(), false);
// if it's empty, then there's no version in the response thanks to filter_path
if (resources.isEmpty() == false) {
@SuppressWarnings("unchecked")
final Map<String, Object> metadata = (Map<String, Object>) resources.get("metadata");
@SuppressWarnings("unchecked")
final Map<String, Object> xpack = metadata != null ? (Map<String, Object>) metadata.get("xpack") : null;
final Object version = xpack != null ? xpack.get("version_created") : null;
// if we don't have it (perhaps more fields were returned), then we need to replace it
if (version instanceof Number) {
// the version in the cluster alert is expected to include the alpha/beta/rc codes as well
return ((Number) version).intValue() < minimumVersion;
}
}
return true;
}
}
|
ClusterAlertHttpResource
|
java
|
apache__camel
|
components/camel-ibm/camel-ibm-secrets-manager/src/main/java/org/apache/camel/component/ibm/secrets/manager/IBMSecretsManagerOperation.java
|
{
"start": 867,
"end": 1041
}
|
enum ____ {
createArbitrarySecret,
createKVSecret,
getSecret,
deleteSecret,
listSecrets,
updateSecret,
listSecretVersions
}
|
IBMSecretsManagerOperation
|
java
|
resilience4j__resilience4j
|
resilience4j-spring-boot2/src/main/java/io/github/resilience4j/bulkhead/autoconfigure/BulkheadAutoConfiguration.java
|
{
"start": 2077,
"end": 2182
}
|
class ____ {
@Configuration
@ConditionalOnClass(Endpoint.class)
static
|
BulkheadAutoConfiguration
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java
|
{
"start": 32057,
"end": 32398
}
|
class ____ {
public String bar() {
return A.BAR;
}
}
""");
TestScanner scanner = inSamePackageScanner(true);
tests.add(scanner);
assertCompiles(scanner);
}
@Test
public void samePackageNegative() {
writeFile(
"A.java",
"""
package p;
public
|
B
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreateViewTest4.java
|
{
"start": 1021,
"end": 5500
}
|
class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = //
" CREATE OR REPLACE FORCE VIEW \"SC_001\".\"V_001\" (\"OBJ_OWNER\", \"OBJ_NAME\", \"OBJ_TYPE\", \"OBJ_ROWID\", \"DB_USER\", \"SID\", \"LOCK_TYPE\", \"ROW_WAIT_FILE#\", \"ROW_WAIT_BLOCK#\", \"ROW_WAIT_ROW#\") AS \n" +
" SELECT owner obj_owner,\n" +
" object_name obj_name,\n" +
" object_type obj_type,\n" +
" dbms_rowid.rowid_create(1, row_wait_obj#, ROW_WAIT_FILE#,\n" +
" ROW_WAIT_BLOCK#,ROW_WAIT_ROW#) obj_rowid,\n" +
" a.username db_user, a.SID SID, a.TYPE lock_type,\n" +
" a.row_wait_file#, a.row_wait_block#, a.row_wait_row#\n" +
" FROM TB_001,\n" +
" (SELECT /*+ no_merge(a) no_merge(b) */\n" +
" a.username, a.SID, a.row_wait_obj#, a.ROW_WAIT_FILE#,\n" +
" a.ROW_WAIT_BLOCK#, a.ROW_WAIT_ROW#, b.TYPE\n" +
" FROM sys.V_$SESSION a, sys.V_$LOCK b\n" +
" WHERE a.username IS NOT NULL\n" +
" AND a.row_wait_obj# <> -1\n" +
" AND a.SID = b.SID\n" +
" AND b.TYPE IN ('TX','TM')\n" +
" ) a\n" +
" WHERE object_id = a.row_wait_obj# ";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE OR REPLACE VIEW \"SC_001\".\"V_001\" (\n" +
"\t\"OBJ_OWNER\", \n" +
"\t\"OBJ_NAME\", \n" +
"\t\"OBJ_TYPE\", \n" +
"\t\"OBJ_ROWID\", \n" +
"\t\"DB_USER\", \n" +
"\t\"SID\", \n" +
"\t\"LOCK_TYPE\", \n" +
"\t\"ROW_WAIT_FILE#\", \n" +
"\t\"ROW_WAIT_BLOCK#\", \n" +
"\t\"ROW_WAIT_ROW#\"\n" +
")\n" +
"AS\n" +
"SELECT owner AS obj_owner, object_name AS obj_name, object_type AS obj_type\n" +
"\t, dbms_rowid.rowid_create(1, row_wait_obj#, ROW_WAIT_FILE#, ROW_WAIT_BLOCK#, ROW_WAIT_ROW#) AS obj_rowid\n" +
"\t, a.username AS db_user, a.SID AS SID, a.TYPE AS lock_type, a.row_wait_file#, a.row_wait_block#\n" +
"\t, a.row_wait_row#\n" +
"FROM TB_001, (\n" +
"\tSELECT /*+ no_merge(a) no_merge(b) */ a.username, a.SID, a.row_wait_obj#, a.ROW_WAIT_FILE#, a.ROW_WAIT_BLOCK#\n" +
"\t\t, a.ROW_WAIT_ROW#, b.TYPE\n" +
"\tFROM sys.V_$SESSION a, sys.V_$LOCK b\n" +
"\tWHERE a.username IS NOT NULL\n" +
"\t\tAND a.row_wait_obj# <> -1\n" +
"\t\tAND a.SID = b.SID\n" +
"\t\tAND b.TYPE IN ('TX', 'TM')\n" +
") a\n" +
"WHERE object_id = a.row_wait_obj#",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(3, visitor.getTables().size());
assertEquals(12, visitor.getColumns().size());
assertTrue(visitor.containsColumn("sys.V_$SESSION", "username"));
assertTrue(visitor.containsColumn("sys.V_$SESSION", "SID"));
assertTrue(visitor.containsColumn("sys.V_$SESSION", "row_wait_obj#"));
}
}
|
OracleCreateViewTest4
|
java
|
google__guice
|
extensions/persist/test/com/google/inject/persist/jpa/CustomPropsEntityManagerFactoryProvisionTest.java
|
{
"start": 1013,
"end": 1929
}
|
class ____ extends TestCase {
private Injector injector;
@Override
public void setUp() {
Properties props = new Properties();
props.put("blah", "blah");
injector = Guice.createInjector(new JpaPersistModule("testUnit").properties(props));
}
@Override
public final void tearDown() {
injector.getInstance(UnitOfWork.class).end();
injector.getInstance(EntityManagerFactory.class).close();
}
public void testSessionCreateOnInjection() {
assertEquals(
"SINGLETON VIOLATION " + UnitOfWork.class.getName(),
injector.getInstance(UnitOfWork.class),
injector.getInstance(UnitOfWork.class));
//startup persistence
injector.getInstance(PersistService.class).start();
injector.getInstance(UnitOfWork.class).begin();
//obtain em
assertTrue(injector.getInstance(EntityManager.class).isOpen());
}
}
|
CustomPropsEntityManagerFactoryProvisionTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/TestFlagSet.java
|
{
"start": 2019,
"end": 12885
}
|
enum ____ { a }
/**
* Test that an entry can be enabled and disabled.
*/
@Test
public void testEntryEnableDisable() {
assertThat(flagSet.flags()).isEmpty();
assertDisabled(SimpleEnum.a);
flagSet.enable(SimpleEnum.a);
assertEnabled(SimpleEnum.a);
flagSet.disable(SimpleEnum.a);
assertDisabled(SimpleEnum.a);
}
/**
* Test the setter.
*/
@Test
public void testSetMethod() {
assertThat(flagSet.flags()).isEmpty();
flagSet.set(SimpleEnum.a, true);
assertEnabled(SimpleEnum.a);
flagSet.set(SimpleEnum.a, false);
assertDisabled(SimpleEnum.a);
}
/**
* Test mutability by making immutable and
* expecting setters to fail.
*/
@Test
public void testMutability() throws Throwable {
flagSet.set(SimpleEnum.a, true);
flagSet.makeImmutable();
intercept(IllegalStateException.class, () ->
flagSet.disable(SimpleEnum.a));
assertEnabled(SimpleEnum.a);
intercept(IllegalStateException.class, () ->
flagSet.set(SimpleEnum.a, false));
assertEnabled(SimpleEnum.a);
// now look at the setters
intercept(IllegalStateException.class, () ->
flagSet.enable(SimpleEnum.b));
assertDisabled(SimpleEnum.b);
intercept(IllegalStateException.class, () ->
flagSet.set(SimpleEnum.b, true));
assertDisabled(SimpleEnum.b);
}
/**
* Test stringification.
*/
@Test
public void testToString() throws Throwable {
// empty
assertStringValue("{}");
assertConfigurationStringMatches("");
// single value
flagSet.enable(SimpleEnum.a);
assertStringValue("{a}");
assertConfigurationStringMatches("a");
// add a second value.
flagSet.enable(SimpleEnum.b);
assertStringValue("{a, b}");
}
/**
* Assert that {@link FlagSet#toString()} matches the expected
* value.
* @param expected expected value
*/
private void assertStringValue(final String expected) {
assertThat(flagSet.toString())
.isEqualTo(expected);
}
/**
* Assert the configuration string form matches that expected.
*/
public void assertConfigurationStringMatches(final String expected) {
assertThat(flagSet.toConfigurationString())
.describedAs("Configuration string of %s", flagSet)
.isEqualTo(expected);
}
/**
* Test parsing from a configuration file.
* Multiple entries must be parsed, whitespace trimmed.
*/
@Test
public void testConfEntry() {
flagSet = flagSetFromConfig("a\t,\nc ", true);
assertFlagSetMatches(flagSet, SimpleEnum.a, SimpleEnum.c);
assertHasCapability(CAPABILITY_A);
assertHasCapability(CAPABILITY_C);
assertLacksCapability(CAPABILITY_B);
assertPathCapabilitiesMatch(flagSet, CAPABILITY_A, CAPABILITY_C);
}
/**
* Create a flagset from a configuration string.
* @param string configuration string.
* @param ignoreUnknown should unknown values be ignored?
* @return a flagset
*/
private static FlagSet<SimpleEnum> flagSetFromConfig(final String string,
final boolean ignoreUnknown) {
final Configuration conf = mkConf(string);
return buildFlagSet(SimpleEnum.class, conf, KEY, ignoreUnknown);
}
/**
* Test parsing from a configuration file,
* where an entry is unknown; the builder is set to ignoreUnknown.
*/
@Test
public void testConfEntryWithUnknownIgnored() {
flagSet = flagSetFromConfig("a, unknown", true);
assertFlagSetMatches(flagSet, SimpleEnum.a);
assertHasCapability(CAPABILITY_A);
assertLacksCapability(CAPABILITY_B);
assertLacksCapability(CAPABILITY_C);
}
/**
* Test parsing from a configuration file where
* the same entry is duplicated.
*/
@Test
public void testDuplicateConfEntry() {
flagSet = flagSetFromConfig("a,\ta,\na\"", true);
assertFlagSetMatches(flagSet, SimpleEnum.a);
assertHasCapability(CAPABILITY_A);
}
/**
* Handle an unknown configuration value.
*/
@Test
public void testConfUnknownFailure() throws Throwable {
intercept(IllegalArgumentException.class, () ->
flagSetFromConfig("a, unknown", false));
}
/**
* Create a configuration with {@link #KEY} set to the given value.
* @param value value to set
* @return the configuration.
*/
private static Configuration mkConf(final String value) {
final Configuration conf = new Configuration(false);
conf.set(KEY, value);
return conf;
}
/**
* Assert that the flagset has a capability.
* @param capability capability to probe for
*/
private void assertHasCapability(final String capability) {
assertThat(flagSet.hasCapability(capability))
.describedAs("Capability of %s on %s", capability, flagSet)
.isTrue();
}
/**
* Assert that the flagset lacks a capability.
* @param capability capability to probe for
*/
private void assertLacksCapability(final String capability) {
assertThat(flagSet.hasCapability(capability))
.describedAs("Capability of %s on %s", capability, flagSet)
.isFalse();
}
/**
* Test the * binding.
*/
@Test
public void testStarEntry() {
flagSet = flagSetFromConfig("*", false);
assertFlags(SimpleEnum.a, SimpleEnum.b, SimpleEnum.c);
assertHasCapability(CAPABILITY_A);
assertHasCapability(CAPABILITY_B);
assertThat(flagSet.pathCapabilities())
.describedAs("path capabilities of %s", flagSet)
.containsExactlyInAnyOrder(CAPABILITY_A, CAPABILITY_B, CAPABILITY_C);
}
@Test
public void testRoundTrip() {
final FlagSet<SimpleEnum> s1 = createFlagSet(SimpleEnum.class,
KEYDOT,
allOf(SimpleEnum.class));
final FlagSet<SimpleEnum> s2 = roundTrip(s1);
assertThat(s1.flags()).isEqualTo(s2.flags());
assertFlagSetMatches(s2, SimpleEnum.a, SimpleEnum.b, SimpleEnum.c);
}
@Test
public void testEmptyRoundTrip() {
final FlagSet<SimpleEnum> s1 = createFlagSet(SimpleEnum.class, KEYDOT,
noneOf(SimpleEnum.class));
final FlagSet<SimpleEnum> s2 = roundTrip(s1);
assertThat(s1.flags())
.isEqualTo(s2.flags());
assertThat(s2.isEmpty())
.describedAs("empty flagset %s", s2)
.isTrue();
assertFlagSetMatches(flagSet);
assertThat(flagSet.pathCapabilities())
.describedAs("path capabilities of %s", flagSet)
.isEmpty();
}
@Test
public void testSetIsClone() {
final EnumSet<SimpleEnum> flags = noneOf(SimpleEnum.class);
final FlagSet<SimpleEnum> s1 = createFlagSet(SimpleEnum.class, KEYDOT, flags);
s1.enable(SimpleEnum.b);
// set a source flag
flags.add(SimpleEnum.a);
// verify the derived flagset is unchanged
assertFlagSetMatches(s1, SimpleEnum.b);
}
@Test
public void testEquality() {
final FlagSet<SimpleEnum> s1 = createFlagSet(SimpleEnum.class, KEYDOT, SimpleEnum.a);
final FlagSet<SimpleEnum> s2 = createFlagSet(SimpleEnum.class, KEYDOT, SimpleEnum.a);
// make one of them immutable
s2.makeImmutable();
assertThat(s1)
.describedAs("s1 == s2")
.isEqualTo(s2);
assertThat(s1.hashCode())
.describedAs("hashcode of s1 == hashcode of s2")
.isEqualTo(s2.hashCode());
}
@Test
public void testInequality() {
final FlagSet<SimpleEnum> s1 =
createFlagSet(SimpleEnum.class, KEYDOT, noneOf(SimpleEnum.class));
final FlagSet<SimpleEnum> s2 =
createFlagSet(SimpleEnum.class, KEYDOT, SimpleEnum.a, SimpleEnum.b);
assertThat(s1)
.describedAs("s1 == s2")
.isNotEqualTo(s2);
}
@Test
public void testClassInequality() {
final FlagSet<?> s1 =
createFlagSet(SimpleEnum.class, KEYDOT, noneOf(SimpleEnum.class));
final FlagSet<?> s2 =
createFlagSet(OtherEnum.class, KEYDOT, OtherEnum.a);
assertThat(s1)
.describedAs("s1 == s2")
.isNotEqualTo(s2);
}
/**
* The copy operation creates a new instance which is now mutable,
* even if the original was immutable.
*/
@Test
public void testCopy() throws Throwable {
FlagSet<SimpleEnum> s1 =
createFlagSet(SimpleEnum.class, KEYDOT, SimpleEnum.a, SimpleEnum.b);
s1.makeImmutable();
FlagSet<SimpleEnum> s2 = s1.copy();
assertThat(s2)
.describedAs("copy of %s", s1)
.isNotSameAs(s1);
assertThat(!s2.isImmutable())
.describedAs("set %s is immutable", s2)
.isTrue();
assertThat(s1)
.describedAs("s1 == s2")
.isEqualTo(s2);
}
@Test
public void testCreateNullEnumClass() throws Throwable {
intercept(NullPointerException.class, () ->
createFlagSet(null, KEYDOT, SimpleEnum.a));
}
@Test
public void testCreateNullPrefix() throws Throwable {
intercept(NullPointerException.class, () ->
createFlagSet(SimpleEnum.class, null, SimpleEnum.a));
}
/**
* Round trip a FlagSet.
* @param flagset FlagSet to save to a configuration and retrieve.
* @return a new FlagSet.
*/
private FlagSet<SimpleEnum> roundTrip(FlagSet<SimpleEnum> flagset) {
final Configuration conf = new Configuration(false);
conf.set(KEY, flagset.toConfigurationString());
return buildFlagSet(SimpleEnum.class, conf, KEY, false);
}
/**
* Assert a flag is enabled in the {@link #flagSet} field.
* @param flag flag to check
*/
private void assertEnabled(final SimpleEnum flag) {
assertThat(flagSet.enabled(flag))
.describedAs("status of flag %s in %s", flag, flagSet)
.isTrue();
}
/**
* Assert a flag is disabled in the {@link #flagSet} field.
* @param flag flag to check
*/
private void assertDisabled(final SimpleEnum flag) {
assertThat(flagSet.enabled(flag))
.describedAs("status of flag %s in %s", flag, flagSet)
.isFalse();
}
/**
* Assert that a set of flags are enabled in the {@link #flagSet} field.
* @param flags flags which must be set.
*/
private void assertFlags(final SimpleEnum... flags) {
for (SimpleEnum flag : flags) {
assertEnabled(flag);
}
}
/**
* Assert that a FlagSet contains an exclusive set of values.
* @param flags flags which must be set.
*/
private void assertFlagSetMatches(
FlagSet<SimpleEnum> fs,
SimpleEnum... flags) {
assertThat(fs.flags())
.describedAs("path capabilities of %s", fs)
.containsExactly(flags);
}
/**
* Assert that a flagset contains exactly the capabilities.
* This is calculated by getting the list of active capabilities
* and asserting on the list.
* @param fs flagset
* @param capabilities capabilities
*/
private void assertPathCapabilitiesMatch(
FlagSet<SimpleEnum> fs,
String... capabilities) {
assertThat(fs.pathCapabilities())
.describedAs("path capabilities of %s", fs)
.containsExactlyInAnyOrder(capabilities);
}
}
|
OtherEnum
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java
|
{
"start": 125230,
"end": 125350
}
|
class ____ implements Principal {
@Override
public String getName() {
return "other";
}
}
static
|
OtherPrincipal
|
java
|
netty__netty
|
handler-ssl-ocsp/src/main/java/io/netty/handler/ssl/ocsp/IoTransport.java
|
{
"start": 1243,
"end": 3460
}
|
class ____ {
private final EventLoop eventLoop;
private final ChannelFactory<SocketChannel> socketChannel;
private final ChannelFactory<DatagramChannel> datagramChannel;
/**
* Default {@link IoTransport} which uses {@link NioIoHandler}, {@link NioSocketChannel}
* and {@link NioDatagramChannel}.
*/
public static final IoTransport DEFAULT = new IoTransport(
new MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory()).next(),
new ChannelFactory<SocketChannel>() {
@Override
public SocketChannel newChannel() {
return new NioSocketChannel();
}
},
new ChannelFactory<DatagramChannel>() {
@Override
public DatagramChannel newChannel() {
return new NioDatagramChannel();
}
});
/**
* Create a new {@link IoTransport} instance
*
* @param eventLoop {@link EventLoop} to use for I/O
* @param socketChannel {@link SocketChannel} for TCP DNS lookup and OCSP query
* @param datagramChannel {@link DatagramChannel} for UDP DNS lookup
* @return {@link NullPointerException} if any parameter is {@code null}
*/
public static IoTransport create(EventLoop eventLoop, ChannelFactory<SocketChannel> socketChannel,
ChannelFactory<DatagramChannel> datagramChannel) {
return new IoTransport(eventLoop, socketChannel, datagramChannel);
}
private IoTransport(EventLoop eventLoop, ChannelFactory<SocketChannel> socketChannel,
ChannelFactory<DatagramChannel> datagramChannel) {
this.eventLoop = checkNotNull(eventLoop, "EventLoop");
this.socketChannel = checkNotNull(socketChannel, "SocketChannel");
this.datagramChannel = checkNotNull(datagramChannel, "DatagramChannel");
}
public EventLoop eventLoop() {
return eventLoop;
}
public ChannelFactory<SocketChannel> socketChannel() {
return socketChannel;
}
public ChannelFactory<DatagramChannel> datagramChannel() {
return datagramChannel;
}
}
|
IoTransport
|
java
|
elastic__elasticsearch
|
modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java
|
{
"start": 1094,
"end": 4901
}
|
class ____ implements ToXContentFragment, Writeable {
private final String queryId;
private final double metricScore;
private MetricDetail optionalMetricDetails;
private final List<RatedSearchHit> ratedHits;
public EvalQueryQuality(String id, double metricScore) {
this(id, metricScore, new ArrayList<>(), null);
}
public EvalQueryQuality(StreamInput in) throws IOException {
this(
in.readString(),
in.readDouble(),
in.readCollectionAsList(RatedSearchHit::new),
in.readOptionalNamedWriteable(MetricDetail.class)
);
}
EvalQueryQuality(String queryId, double evaluationResult, List<RatedSearchHit> ratedHits, MetricDetail optionalMetricDetails) {
this.queryId = queryId;
this.metricScore = evaluationResult;
this.optionalMetricDetails = optionalMetricDetails;
this.ratedHits = ratedHits;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(queryId);
out.writeDouble(metricScore);
out.writeCollection(ratedHits);
out.writeOptionalNamedWriteable(this.optionalMetricDetails);
}
public String getId() {
return queryId;
}
public double metricScore() {
return metricScore;
}
public void setMetricDetails(MetricDetail breakdown) {
this.optionalMetricDetails = breakdown;
}
public MetricDetail getMetricDetails() {
return this.optionalMetricDetails;
}
public void addHitsAndRatings(List<RatedSearchHit> hits) {
this.ratedHits.addAll(hits);
}
public List<RatedSearchHit> getHitsAndRatings() {
return this.ratedHits;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(queryId);
builder.field(METRIC_SCORE_FIELD.getPreferredName(), this.metricScore);
builder.startArray(UNRATED_DOCS_FIELD.getPreferredName());
for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) {
builder.startObject();
builder.field(RatedDocument.INDEX_FIELD.getPreferredName(), key.index());
builder.field(RatedDocument.DOC_ID_FIELD.getPreferredName(), key.docId());
builder.endObject();
}
builder.endArray();
builder.startArray(HITS_FIELD.getPreferredName());
for (RatedSearchHit hit : ratedHits) {
hit.toXContent(builder, params);
}
builder.endArray();
if (optionalMetricDetails != null) {
builder.field(METRIC_DETAILS_FIELD.getPreferredName(), optionalMetricDetails);
}
builder.endObject();
return builder;
}
static final ParseField METRIC_SCORE_FIELD = new ParseField("metric_score");
private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs");
static final ParseField HITS_FIELD = new ParseField("hits");
static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details");
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
EvalQueryQuality other = (EvalQueryQuality) obj;
return Objects.equals(queryId, other.queryId)
&& Objects.equals(metricScore, other.metricScore)
&& Objects.equals(ratedHits, other.ratedHits)
&& Objects.equals(optionalMetricDetails, other.optionalMetricDetails);
}
@Override
public final int hashCode() {
return Objects.hash(queryId, metricScore, ratedHits, optionalMetricDetails);
}
}
|
EvalQueryQuality
|
java
|
redisson__redisson
|
redisson-hibernate/redisson-hibernate-4/src/main/java/org/redisson/hibernate/strategy/AbstractReadWriteAccessStrategy.java
|
{
"start": 913,
"end": 1851
}
|
class ____ extends BaseRegionAccessStrategy {
final RMapCache<Object, Object> mapCache;
public AbstractReadWriteAccessStrategy(Settings settings, GeneralDataRegion region, RMapCache<Object, Object> mapCache) {
super(settings, region);
this.mapCache = mapCache;
}
@Override
public Object get(Object key, long txTimestamp) throws CacheException {
return region.get(key);
}
@Override
public boolean putFromLoad(Object key, Object value, long txTimestamp, Object version, boolean minimalPutOverride)
throws CacheException {
region.put(key, value);
return true;
}
@Override
public SoftLock lockItem(Object key, Object version) throws CacheException {
return null;
}
@Override
public void unlockItem(Object key, SoftLock lock) throws CacheException {
region.evict(key);
}
}
|
AbstractReadWriteAccessStrategy
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/concurrent/CompleteFuture.java
|
{
"start": 890,
"end": 3941
}
|
class ____<V> extends AbstractFuture<V> {
private final EventExecutor executor;
/**
* Creates a new instance.
*
* @param executor the {@link EventExecutor} associated with this future
*/
protected CompleteFuture(EventExecutor executor) {
this.executor = executor;
}
/**
* Return the {@link EventExecutor} which is used by this {@link CompleteFuture}.
*/
protected EventExecutor executor() {
return executor;
}
@Override
public Future<V> addListener(GenericFutureListener<? extends Future<? super V>> listener) {
DefaultPromise.notifyListener(executor(), this, ObjectUtil.checkNotNull(listener, "listener"));
return this;
}
@Override
public Future<V> addListeners(GenericFutureListener<? extends Future<? super V>>... listeners) {
for (GenericFutureListener<? extends Future<? super V>> l:
ObjectUtil.checkNotNull(listeners, "listeners")) {
if (l == null) {
break;
}
DefaultPromise.notifyListener(executor(), this, l);
}
return this;
}
@Override
public Future<V> removeListener(GenericFutureListener<? extends Future<? super V>> listener) {
// NOOP
return this;
}
@Override
public Future<V> removeListeners(GenericFutureListener<? extends Future<? super V>>... listeners) {
// NOOP
return this;
}
@Override
public Future<V> await() throws InterruptedException {
if (Thread.interrupted()) {
throw new InterruptedException();
}
return this;
}
@Override
public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
if (Thread.interrupted()) {
throw new InterruptedException();
}
return true;
}
@Override
public Future<V> sync() throws InterruptedException {
return this;
}
@Override
public Future<V> syncUninterruptibly() {
return this;
}
@Override
public boolean await(long timeoutMillis) throws InterruptedException {
if (Thread.interrupted()) {
throw new InterruptedException();
}
return true;
}
@Override
public Future<V> awaitUninterruptibly() {
return this;
}
@Override
public boolean awaitUninterruptibly(long timeout, TimeUnit unit) {
return true;
}
@Override
public boolean awaitUninterruptibly(long timeoutMillis) {
return true;
}
@Override
public boolean isDone() {
return true;
}
@Override
public boolean isCancellable() {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
/**
* {@inheritDoc}
*
* @param mayInterruptIfRunning this value has no effect in this implementation.
*/
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
}
|
CompleteFuture
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/MultipleNullnessAnnotationsTest.java
|
{
"start": 834,
"end": 1493
}
|
class ____ {
private final CompilationTestHelper testHelper =
CompilationTestHelper.newInstance(MultipleNullnessAnnotations.class, getClass());
@Test
public void positive() {
testHelper
.addSourceLines(
"Test.java",
"""
import org.checkerframework.checker.nullness.compatqual.NonNullDecl;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.util.List;
abstract
|
MultipleNullnessAnnotationsTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/annotation/UnknownSerializer.java
|
{
"start": 964,
"end": 1112
}
|
class ____ {@link DataTypeHint} for representing an unknown serializer that should be
* replaced with a more specific class.
*/
@Internal
abstract
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsSystemIndicesIntegTests.java
|
{
"start": 4258,
"end": 4970
}
|
class ____ extends Plugin implements SystemIndexPlugin {
static final String INDEX_NAME = ".test-system-index";
@Override
public Collection<SystemIndexDescriptor> getSystemIndexDescriptors(Settings settings) {
return List.of(
SystemIndexDescriptorUtils.createUnmanaged(INDEX_NAME + "*", "System index for [" + getTestClass().getName() + ']')
);
}
@Override
public String getFeatureName() {
return SearchableSnapshotsSystemIndicesIntegTests.class.getSimpleName();
}
@Override
public String getFeatureDescription() {
return "test plugin";
}
}
}
|
TestSystemIndexPlugin
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.