language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-jackson/src/test/java/org/apache/camel/component/jackson/JacksonModuleRefTest.java | {
"start": 942,
"end": 1527
} | class ____ extends JacksonModuleTest {
@BindToRegistry("myJacksonModule")
private MyModule module = new MyModule();
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
JacksonDataFormat format = new JacksonDataFormat();
format.setInclude("NON_NULL");
format.setModuleRefs("myJacksonModule");
from("direct:marshal").marshal(format).to("mock:marshal");
}
};
}
}
| JacksonModuleRefTest |
java | apache__rocketmq | tools/src/test/java/org/apache/rocketmq/tools/command/namesrv/UpdateKvConfigCommandTest.java | {
"start": 1306,
"end": 2531
} | class ____ {
private ServerResponseMocker brokerMocker;
private ServerResponseMocker nameServerMocker;
@Before
public void before() {
brokerMocker = startOneBroker();
nameServerMocker = NameServerMocker.startByDefaultConf(brokerMocker.listenPort());
}
@After
public void after() {
brokerMocker.shutdown();
nameServerMocker.shutdown();
}
@Test
public void testExecute() throws SubCommandException {
UpdateKvConfigCommand cmd = new UpdateKvConfigCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {
"-s namespace", "-k topicname", "-v unit_test",
String.format("-n localhost:%d", nameServerMocker.listenPort())};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName() + cmd.commandDesc(), subargs,
cmd.buildCommandlineOptions(options), new DefaultParser());
cmd.execute(commandLine, options, null);
}
private ServerResponseMocker startOneBroker() {
// start broker
return ServerResponseMocker.startServer(null);
}
}
| UpdateKvConfigCommandTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2700/Issue2754.java | {
"start": 2208,
"end": 2253
} | class ____{
public Calendar p1;
}
}
| C |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/cache/interceptor/CacheOperationExpressionEvaluator.java | {
"start": 1469,
"end": 4468
} | class ____ extends CachedExpressionEvaluator {
/**
* Indicate that there is no result variable.
*/
public static final Object NO_RESULT = new Object();
/**
* Indicate that the result variable cannot be used at all.
*/
public static final Object RESULT_UNAVAILABLE = new Object();
/**
* The name of the variable holding the result object.
*/
public static final String RESULT_VARIABLE = "result";
private final Map<ExpressionKey, Expression> keyCache = new ConcurrentHashMap<>(64);
private final Map<ExpressionKey, Expression> conditionCache = new ConcurrentHashMap<>(64);
private final Map<ExpressionKey, Expression> unlessCache = new ConcurrentHashMap<>(64);
private final CacheEvaluationContextFactory evaluationContextFactory;
public CacheOperationExpressionEvaluator(CacheEvaluationContextFactory evaluationContextFactory) {
super();
this.evaluationContextFactory = evaluationContextFactory;
this.evaluationContextFactory.setParameterNameDiscoverer(this::getParameterNameDiscoverer);
}
/**
* Create an {@link EvaluationContext}.
* @param caches the current caches
* @param method the method
* @param args the method arguments
* @param target the target object
* @param targetClass the target class
* @param result the return value (can be {@code null}) or
* {@link #NO_RESULT} if there is no return at this time
* @return the evaluation context
*/
public EvaluationContext createEvaluationContext(Collection<? extends Cache> caches,
Method method, @Nullable Object[] args, Object target, Class<?> targetClass, Method targetMethod,
@Nullable Object result) {
CacheExpressionRootObject rootObject = new CacheExpressionRootObject(
caches, method, args, target, targetClass);
CacheEvaluationContext evaluationContext = this.evaluationContextFactory
.forOperation(rootObject, targetMethod, args);
if (result == RESULT_UNAVAILABLE) {
evaluationContext.addUnavailableVariable(RESULT_VARIABLE);
}
else if (result != NO_RESULT) {
evaluationContext.setVariable(RESULT_VARIABLE, result);
}
return evaluationContext;
}
public @Nullable Object key(String keyExpression, AnnotatedElementKey methodKey, EvaluationContext evalContext) {
return getExpression(this.keyCache, methodKey, keyExpression).getValue(evalContext);
}
public boolean condition(String conditionExpression, AnnotatedElementKey methodKey, EvaluationContext evalContext) {
return (Boolean.TRUE.equals(getExpression(this.conditionCache, methodKey, conditionExpression).getValue(
evalContext, Boolean.class)));
}
public boolean unless(String unlessExpression, AnnotatedElementKey methodKey, EvaluationContext evalContext) {
return (Boolean.TRUE.equals(getExpression(this.unlessCache, methodKey, unlessExpression).getValue(
evalContext, Boolean.class)));
}
/**
* Clear all caches.
*/
void clear() {
this.keyCache.clear();
this.conditionCache.clear();
this.unlessCache.clear();
}
}
| CacheOperationExpressionEvaluator |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/scalar/ArrayIntersectFunction.java | {
"start": 1692,
"end": 4279
} | class ____ extends BuiltInScalarFunction {
private final ArrayData.ElementGetter elementGetter;
private final EqualityAndHashcodeProvider equalityAndHashcodeProvider;
public ArrayIntersectFunction(SpecializedFunction.SpecializedContext context) {
super(BuiltInFunctionDefinitions.ARRAY_INTERSECT, context);
final DataType dataType =
((CollectionDataType) context.getCallContext().getArgumentDataTypes().get(0))
.getElementDataType()
.toInternal();
elementGetter = ArrayData.createElementGetter(dataType.toInternal().getLogicalType());
this.equalityAndHashcodeProvider = new EqualityAndHashcodeProvider(context, dataType);
}
@Override
public void open(FunctionContext context) throws Exception {
equalityAndHashcodeProvider.open(context);
}
public @Nullable ArrayData eval(ArrayData arrayOne, ArrayData arrayTwo) {
try {
if (arrayOne == null || arrayTwo == null) {
return null;
}
Set<ObjectContainer> set = new HashSet<>();
for (int pos = 0; pos < arrayTwo.size(); pos++) {
final Object element = elementGetter.getElementOrNull(arrayTwo, pos);
final ObjectContainer objectContainer = createObjectContainer(element);
set.add(objectContainer);
}
Set<ObjectContainer> res = new LinkedHashSet<>();
for (int pos = 0; pos < arrayOne.size(); pos++) {
final Object element = elementGetter.getElementOrNull(arrayOne, pos);
final ObjectContainer objectContainer = createObjectContainer(element);
if (set.contains(objectContainer)) {
res.add(objectContainer);
}
}
return new GenericArrayData(
res.stream()
.map(element -> element != null ? element.getObject() : null)
.toArray());
} catch (Throwable t) {
throw new FlinkRuntimeException(t);
}
}
private ObjectContainer createObjectContainer(Object element) {
if (element == null) {
return null;
}
return new ObjectContainer(
element,
equalityAndHashcodeProvider::equals,
equalityAndHashcodeProvider::hashCode);
}
@Override
public void close() throws Exception {
equalityAndHashcodeProvider.close();
}
}
| ArrayIntersectFunction |
java | hibernate__hibernate-orm | hibernate-agroal/src/test/java/org/hibernate/test/agroal/AgroalSkipAutoCommitTest.java | {
"start": 3390,
"end": 3615
} | class ____ implements SettingProvider.Provider<PreparedStatementSpyConnectionProvider> {
@Override
public PreparedStatementSpyConnectionProvider getSetting() {
return connectionProvider;
}
}
}
| ConnectionProviderProvider |
java | apache__kafka | connect/api/src/main/java/org/apache/kafka/connect/data/ConnectSchema.java | {
"start": 1148,
"end": 14038
} | class ____ implements Schema {
/**
* Maps {@link Schema.Type}s to a list of Java classes that can be used to represent them.
*/
private static final Map<Type, List<Class<?>>> SCHEMA_TYPE_CLASSES = Collections.unmodifiableMap(new EnumMap<>(Map.ofEntries(
Map.entry(Type.INT8, List.of(Byte.class)),
Map.entry(Type.INT16, List.of(Short.class)),
Map.entry(Type.INT32, List.of(Integer.class)),
Map.entry(Type.INT64, List.of(Long.class)),
Map.entry(Type.FLOAT32, List.of(Float.class)),
Map.entry(Type.FLOAT64, List.of(Double.class)),
Map.entry(Type.BOOLEAN, List.of(Boolean.class)),
Map.entry(Type.STRING, List.of(String.class)),
// Bytes are special and have 2 representations. byte[] causes problems because it doesn't handle equals() and
// hashCode() like we want objects to, so we support both byte[] and ByteBuffer. Using plain byte[] can cause
// those methods to fail, so ByteBuffers are recommended
Map.entry(Type.BYTES, List.of(byte[].class, ByteBuffer.class)),
Map.entry(Type.ARRAY, List.of(List.class)),
Map.entry(Type.MAP, List.of(Map.class)),
Map.entry(Type.STRUCT, List.of(Struct.class))
)));
/**
* Maps known logical types to a list of Java classes that can be used to represent them.
*/
// We don't need to put these into JAVA_CLASS_SCHEMA_TYPES since that's only used to determine schemas for
// schemaless data and logical types will have ambiguous schemas (e.g. many of them use the same Java class) so
// they should not be used without schemas.
private static final Map<String, List<Class<?>>> LOGICAL_TYPE_CLASSES = Map.of(
Decimal.LOGICAL_NAME, List.of(BigDecimal.class),
Date.LOGICAL_NAME, List.of(java.util.Date.class),
Time.LOGICAL_NAME, List.of(java.util.Date.class),
Timestamp.LOGICAL_NAME, List.of(java.util.Date.class)
);
/**
* Maps the Java classes to the corresponding {@link Schema.Type}.
*/
private static final Map<Class<?>, Type> JAVA_CLASS_SCHEMA_TYPES = SCHEMA_TYPE_CLASSES.entrySet()
.stream()
.flatMap(entry -> entry.getValue().stream().map(klass -> Map.entry(klass, entry.getKey())))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
// The type of the field
private final Type type;
private final boolean optional;
private final Object defaultValue;
private final List<Field> fields;
private final Map<String, Field> fieldsByName;
private final Schema keySchema;
private final Schema valueSchema;
// Optional name and version provide a built-in way to indicate what type of data is included. Most
// useful for structs to indicate the semantics of the struct and map it to some existing underlying
// serializer-specific schema. However, can also be useful in specifying other logical types (e.g. a set is an array
// with additional constraints).
private final String name;
private final Integer version;
// Optional human readable documentation describing this schema.
private final String doc;
private final Map<String, String> parameters;
// precomputed hash code. There is no need to re-compute every time hashCode() is called.
private Integer hash = null;
/**
* Construct a Schema. Most users should not construct schemas manually, preferring {@link SchemaBuilder} instead.
*/
public ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema) {
this.type = type;
this.optional = optional;
this.defaultValue = defaultValue;
this.name = name;
this.version = version;
this.doc = doc;
this.parameters = parameters;
if (this.type == Type.STRUCT) {
this.fields = fields == null ? List.of() : fields;
this.fieldsByName = new HashMap<>(this.fields.size());
for (Field field : this.fields)
fieldsByName.put(field.name(), field);
} else {
this.fields = null;
this.fieldsByName = null;
}
this.keySchema = keySchema;
this.valueSchema = valueSchema;
}
/**
* Construct a Schema for a primitive type, setting schema parameters, struct fields, and key and value schemas to null.
*/
public ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc) {
this(type, optional, defaultValue, name, version, doc, null, null, null, null);
}
/**
* Construct a default schema for a primitive type. The schema is required, has no default value, name, version,
* or documentation.
*/
public ConnectSchema(Type type) {
this(type, false, null, null, null, null);
}
@Override
public Type type() {
return type;
}
@Override
public boolean isOptional() {
return optional;
}
@Override
public Object defaultValue() {
return defaultValue;
}
@Override
public String name() {
return name;
}
@Override
public Integer version() {
return version;
}
@Override
public String doc() {
return doc;
}
@Override
public Map<String, String> parameters() {
return parameters;
}
@Override
public List<Field> fields() {
if (type != Type.STRUCT)
throw new DataException("Cannot list fields on non-struct type");
return fields;
}
@Override
public Field field(String fieldName) {
if (type != Type.STRUCT)
throw new DataException("Cannot look up fields on non-struct type");
return fieldsByName.get(fieldName);
}
@Override
public Schema keySchema() {
if (type != Type.MAP)
throw new DataException("Cannot look up key schema on non-map type");
return keySchema;
}
@Override
public Schema valueSchema() {
if (type != Type.MAP && type != Type.ARRAY)
throw new DataException("Cannot look up value schema on non-array and non-map type");
return valueSchema;
}
/**
* Validate that the value can be used with the schema, i.e. that its type matches the schema type and nullability
* requirements. Throws a {@link DataException} if the value is invalid.
* @param schema Schema to test
* @param value value to test
*/
public static void validateValue(Schema schema, Object value) {
validateValue(null, schema, value);
}
public static void validateValue(String field, Schema schema, Object value) {
validateValue(schema, value, field == null ? "value" : "field: \"" + field + "\"");
}
private static void validateValue(Schema schema, Object value, String location) {
if (value == null) {
if (!schema.isOptional())
throw new DataException("Invalid value: null used for required " + location
+ ", schema type: " + schema.type());
return;
}
List<Class<?>> expectedClasses = expectedClassesFor(schema);
boolean foundMatch = false;
for (Class<?> expectedClass : expectedClasses) {
if (expectedClass.isInstance(value)) {
foundMatch = true;
break;
}
}
if (!foundMatch) {
StringBuilder exceptionMessage = new StringBuilder("Invalid Java object for schema");
if (schema.name() != null) {
exceptionMessage.append(" \"").append(schema.name()).append("\"");
}
exceptionMessage.append(" with type ").append(schema.type()).append(": ").append(value.getClass());
if (location != null) {
exceptionMessage.append(" for ").append(location);
}
throw new DataException(exceptionMessage.toString());
}
switch (schema.type()) {
case STRUCT:
Struct struct = (Struct) value;
if (!struct.schema().equals(schema))
throw new DataException("Struct schemas do not match.");
struct.validate();
break;
case ARRAY:
List<?> array = (List<?>) value;
String entryLocation = "element of array " + location;
Schema arrayValueSchema = assertSchemaNotNull(schema.valueSchema(), entryLocation);
for (Object entry : array) {
validateValue(arrayValueSchema, entry, entryLocation);
}
break;
case MAP:
Map<?, ?> map = (Map<?, ?>) value;
String keyLocation = "key of map " + location;
String valueLocation = "value of map " + location;
Schema mapKeySchema = assertSchemaNotNull(schema.keySchema(), keyLocation);
Schema mapValueSchema = assertSchemaNotNull(schema.valueSchema(), valueLocation);
for (Map.Entry<?, ?> entry : map.entrySet()) {
validateValue(mapKeySchema, entry.getKey(), keyLocation);
validateValue(mapValueSchema, entry.getValue(), valueLocation);
}
break;
}
}
private static Schema assertSchemaNotNull(Schema schema, String location) {
if (schema == null) {
throw new DataException("No schema defined for " + location);
}
return schema;
}
private static List<Class<?>> expectedClassesFor(Schema schema) {
List<Class<?>> expectedClasses = null;
if (schema.name() != null) {
expectedClasses = LOGICAL_TYPE_CLASSES.get(schema.name());
}
if (expectedClasses == null)
expectedClasses = SCHEMA_TYPE_CLASSES.getOrDefault(schema.type(), List.of());
return expectedClasses;
}
/**
* Validate that the value can be used for this schema, i.e. that its type matches the schema type and optional
* requirements. Throws a {@link DataException} if the value is invalid.
* @param value the value to validate
*/
public void validateValue(Object value) {
validateValue(this, value);
}
@Override
public ConnectSchema schema() {
return this;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ConnectSchema schema = (ConnectSchema) o;
return Objects.equals(optional, schema.optional) &&
Objects.equals(version, schema.version) &&
Objects.equals(name, schema.name) &&
Objects.equals(doc, schema.doc) &&
Objects.equals(type, schema.type) &&
Objects.deepEquals(defaultValue, schema.defaultValue) &&
Objects.equals(fields, schema.fields) &&
Objects.equals(keySchema, schema.keySchema) &&
Objects.equals(valueSchema, schema.valueSchema) &&
Objects.equals(parameters, schema.parameters);
}
@Override
public int hashCode() {
if (this.hash == null) {
this.hash = Objects.hash(type, optional, defaultValue, fields, keySchema, valueSchema, name, version, doc,
parameters);
}
return this.hash;
}
@Override
public String toString() {
if (name != null)
return "Schema{" + name + ":" + type + "}";
else
return "Schema{" + type + "}";
}
/**
* Get the {@link Schema.Type} associated with the given class.
*
* @param klass the Class whose associated schema type is to be returned
* @return the corresponding type, or null if there is no matching type
*/
public static Type schemaType(Class<?> klass) {
synchronized (JAVA_CLASS_SCHEMA_TYPES) {
Type schemaType = JAVA_CLASS_SCHEMA_TYPES.get(klass);
if (schemaType != null)
return schemaType;
// Since the lookup only checks the class, we need to also try
for (Map.Entry<Class<?>, Type> entry : JAVA_CLASS_SCHEMA_TYPES.entrySet()) {
try {
klass.asSubclass(entry.getKey());
// Cache this for subsequent lookups
JAVA_CLASS_SCHEMA_TYPES.put(klass, entry.getValue());
return entry.getValue();
} catch (ClassCastException e) {
// Expected, ignore
}
}
}
return null;
}
}
| ConnectSchema |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/type/YearTypeHandlerTest.java | {
"start": 982,
"end": 2887
} | class ____ extends BaseTypeHandlerTest {
private static final TypeHandler<Year> TYPE_HANDLER = new YearTypeHandler();
private static final Year INSTANT = Year.now();
@Override
@Test
public void shouldSetParameter() throws Exception {
TYPE_HANDLER.setParameter(ps, 1, INSTANT, null);
verify(ps).setInt(1, INSTANT.getValue());
}
@Override
@Test
public void shouldGetResultFromResultSetByName() throws Exception {
when(rs.getInt("column")).thenReturn(INSTANT.getValue(), 0);
assertEquals(INSTANT, TYPE_HANDLER.getResult(rs, "column"));
assertEquals(Year.of(0), TYPE_HANDLER.getResult(rs, "column"));
}
@Override
@Test
public void shouldGetResultNullFromResultSetByName() throws Exception {
when(rs.getInt("column")).thenReturn(0);
when(rs.wasNull()).thenReturn(true);
assertNull(TYPE_HANDLER.getResult(rs, "column"));
}
@Override
@Test
public void shouldGetResultFromResultSetByPosition() throws Exception {
when(rs.getInt(1)).thenReturn(INSTANT.getValue(), 0);
assertEquals(INSTANT, TYPE_HANDLER.getResult(rs, 1));
assertEquals(Year.of(0), TYPE_HANDLER.getResult(rs, 1));
}
@Override
@Test
public void shouldGetResultNullFromResultSetByPosition() throws Exception {
when(rs.getInt(1)).thenReturn(0);
when(rs.wasNull()).thenReturn(true);
assertNull(TYPE_HANDLER.getResult(rs, 1));
}
@Override
@Test
public void shouldGetResultFromCallableStatement() throws Exception {
when(cs.getInt(1)).thenReturn(INSTANT.getValue(), 0);
assertEquals(INSTANT, TYPE_HANDLER.getResult(cs, 1));
assertEquals(Year.of(0), TYPE_HANDLER.getResult(cs, 1));
}
@Override
@Test
public void shouldGetResultNullFromCallableStatement() throws Exception {
when(cs.getInt(1)).thenReturn(0);
when(cs.wasNull()).thenReturn(true);
assertNull(TYPE_HANDLER.getResult(cs, 1));
}
}
| YearTypeHandlerTest |
java | apache__logging-log4j2 | log4j-api-test/src/main/java/org/apache/logging/log4j/test/junit/UsingThreadContextStack.java | {
"start": 1320,
"end": 1741
} | class ____ using {@link org.apache.logging.log4j.spi.ThreadContextStack} APIs. This will automatically clear and
* restore the thread context stack (NDC) for each test invocation.
*
* @since 2.14.0
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Documented
@Inherited
@ExtendWith(ThreadContextExtension.class)
@ResourceLock(value = Resources.SYSTEM_PROPERTIES, mode = ResourceAccessMode.READ)
public @ | as |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/ClassRelativeResourceLoader.java | {
"start": 1223,
"end": 1717
} | class ____ load resources through
*/
public ClassRelativeResourceLoader(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
this.clazz = clazz;
setClassLoader(clazz.getClassLoader());
}
@Override
protected Resource getResourceByPath(String path) {
return new ClassRelativeContextResource(path, this.clazz);
}
/**
* ClassPathResource that explicitly expresses a context-relative path
* through implementing the ContextResource interface.
*/
private static | to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/RestrictionRenderingContext.java | {
"start": 205,
"end": 278
} | interface ____ {
String makeParameterMarker();
}
| RestrictionRenderingContext |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ErrorTranslation.java | {
"start": 1922,
"end": 2545
} | class ____ {
/**
* OpenSSL stream closed error: {@value}.
* See HADOOP-19027.
*/
public static final String OPENSSL_STREAM_CLOSED = "WFOPENSSL0035";
/**
* Classname of unshaded Http Client exception: {@value}.
*/
private static final String RAW_NO_HTTP_RESPONSE_EXCEPTION =
"org.apache.http.NoHttpResponseException";
/**
* Classname of shaded Http Client exception: {@value}.
*/
private static final String SHADED_NO_HTTP_RESPONSE_EXCEPTION =
"software.amazon.awssdk.thirdparty.org.apache.http.NoHttpResponseException";
/**
* S3 encryption client exception | ErrorTranslation |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/Case1.java | {
"start": 877,
"end": 1807
} | class ____ extends PoolTestCase {
public void test_f() throws Exception {
final DruidDataSource dataSource = new DruidDataSource();
dataSource.setTimeBetweenConnectErrorMillis(100);
final long startTime = System.currentTimeMillis();
final long okTime = startTime + 1000 * 1;
dataSource.setDriver(new MockDriver() {
@Override
public Connection connect(String url, Properties info) throws SQLException {
if (System.currentTimeMillis() < okTime) {
throw new SQLException();
}
return super.connect(url, info);
}
});
dataSource.setUrl("jdbc:mock:");
dataSource.setMinIdle(0);
dataSource.setMaxActive(2);
dataSource.setMaxIdle(2);
Connection conn = dataSource.getConnection();
conn.close();
dataSource.close();
}
}
| Case1 |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/util/AnnotationUtilsTests.java | {
"start": 28894,
"end": 29068
} | class ____ extends MultiComposedExtensionClass {
}
@ExtendWith("x")
@Extensions({ @ExtendWith("y"), @ExtendWith("z") })
@ExtendWithBar
static | SubMultiComposedExtensionClass |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/BlockingFlowableMostRecent.java | {
"start": 1712,
"end": 2622
} | class ____<T> extends DefaultSubscriber<T> {
volatile Object value;
MostRecentSubscriber(T value) {
this.value = NotificationLite.next(value);
}
@Override
public void onComplete() {
value = NotificationLite.complete();
}
@Override
public void onError(Throwable e) {
value = NotificationLite.error(e);
}
@Override
public void onNext(T args) {
value = NotificationLite.next(args);
}
/**
* The {@link Iterator} return is not thread safe. In other words don't call {@link Iterator#hasNext()} in one
* thread expect {@link Iterator#next()} called from a different thread to work.
* @return the Iterator
*/
public Iterator getIterable() {
return new Iterator();
}
final | MostRecentSubscriber |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/JsonValueSerializationTest.java | {
"start": 5365,
"end": 5530
} | class ____ implements Elem1806 {
public int value = 1;
}
}
// [databind#2822]
@JsonPropertyOrder({ "description", "b" })
static | Impl |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java | {
"start": 26085,
"end": 27350
} | class ____ extends SortingNumericDoubleValues implements ScorerAware {
private final SortedNumericDoubleValues doubleValues;
private final AggregationScript script;
DoubleValues(SortedNumericDoubleValues values, AggregationScript script) {
this.doubleValues = values;
this.script = script;
}
@Override
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}
@Override
public boolean advanceExact(int target) throws IOException {
if (doubleValues.advanceExact(target)) {
resize(doubleValues.docValueCount());
script.setDocument(target);
for (int i = 0; i < docValueCount(); ++i) {
script.setNextAggregationValue(doubleValues.nextValue());
values[i] = script.runAsDouble();
}
sort();
return true;
}
return false;
}
}
}
public static | DoubleValues |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/util/JsonParserSequence.java | {
"start": 442,
"end": 8805
} | class ____ extends JsonParserDelegate
{
/**
* Parsers other than the first one (which is initially assigned
* as delegate)
*/
protected final JsonParser[] _parsers;
/**
* Configuration that determines whether state of parsers is first verified
* to see if parser already points to a token (that is,
* {@link JsonParser#hasCurrentToken()} returns <code>true</code>), and if so
* that token is first return before {@link JsonParser#nextToken} is called.
* If enabled, this check is made; if disabled, no check is made and
* {@link JsonParser#nextToken} is always called for all parsers.
*<p>
* Default setting is <code>false</code> (for backwards-compatibility)
* so that possible existing token is not considered for parsers.
*/
protected final boolean _checkForExistingToken;
/**
* Index of the next parser in {@link #_parsers}.
*/
protected int _nextParserIndex;
/**
* Flag used to indicate that `JsonParser.nextToken()` should not be called,
* due to parser already pointing to a token.
*/
protected boolean _hasToken;
/*
*******************************************************
* Construction
*******************************************************
*/
protected JsonParserSequence(boolean checkForExistingToken, JsonParser[] parsers)
{
super(parsers[0]);
_checkForExistingToken = checkForExistingToken;
_hasToken = checkForExistingToken && delegate.hasCurrentToken();
_parsers = parsers;
_nextParserIndex = 1;
}
/**
* Method that will construct a sequence (possibly a sequence) that
* contains all given sub-parsers.
* All parsers given are checked to see if they are sequences: and
* if so, they will be "flattened", that is, contained parsers are
* directly added in a new sequence instead of adding sequences
* within sequences. This is done to minimize delegation depth,
* ideally only having just a single level of delegation.
*
* @param checkForExistingToken Flag passed to be assigned as
* {@link #_checkForExistingToken} for resulting sequence
* @param first First parser to traverse
* @param second Second parser to traverse
*
* @return Sequence instance constructed
*/
public static JsonParserSequence createFlattened(boolean checkForExistingToken,
JsonParser first, JsonParser second)
{
if (!(first instanceof JsonParserSequence || second instanceof JsonParserSequence)) {
return new JsonParserSequence(checkForExistingToken,
new JsonParser[] { first, second });
}
ArrayList<JsonParser> p = new ArrayList<>(10);
if (first instanceof JsonParserSequence) {
((JsonParserSequence) first).addFlattenedActiveParsers(p);
} else {
p.add(first);
}
if (second instanceof JsonParserSequence) {
((JsonParserSequence) second).addFlattenedActiveParsers(p);
} else {
p.add(second);
}
return new JsonParserSequence(checkForExistingToken,
p.toArray(new JsonParser[0]));
}
@SuppressWarnings("resource")
protected void addFlattenedActiveParsers(List<JsonParser> listToAddIn)
{
for (int i = _nextParserIndex-1, len = _parsers.length; i < len; ++i) {
JsonParser p = _parsers[i];
if (p instanceof JsonParserSequence) {
((JsonParserSequence) p).addFlattenedActiveParsers(listToAddIn);
} else {
listToAddIn.add(p);
}
}
}
/*
/*******************************************************
/* Overridden methods, needed: cases where default
/* delegation does not work
/*******************************************************
*/
@Override
public void close() throws JacksonException {
do { delegate.close(); } while (switchToNext());
}
@Override
public JsonToken nextToken() throws JacksonException
{
if (delegate == null) {
return null;
}
if (_hasToken) {
_hasToken = false;
return delegate.currentToken();
}
JsonToken t = delegate.nextToken();
if (t == null) {
return switchAndReturnNext();
}
return t;
}
/**
* Need to override, re-implement similar to how method defined in
* {@link tools.jackson.core.base.ParserMinimalBase}, to keep
* state correct here.
*/
@Override
public JsonParser skipChildren() throws JacksonException
{
if ((delegate.currentToken() != JsonToken.START_OBJECT)
&& (delegate.currentToken() != JsonToken.START_ARRAY)) {
return this;
}
int open = 1;
// Since proper matching of start/end markers is handled
// by nextToken(), we'll just count nesting levels here
while (true) {
JsonToken t = nextToken();
if (t == null) { // not ideal but for now, just return
return this;
}
if (t.isStructStart()) {
++open;
} else if (t.isStructEnd()) {
if (--open == 0) {
return this;
}
}
}
}
/*
/*******************************************************
/* And some more methods where default delegation would
/* cause problems with state handling here
/*******************************************************
*/
@Override
public String nextName() throws JacksonException {
// NOTE: call `nextToken()` to handle delegation
return (nextToken() == JsonToken.PROPERTY_NAME) ? currentName() : null;
}
@Override
public boolean nextName(SerializableString str) throws JacksonException {
// NOTE: call `nextToken()` to handle delegation
return (nextToken() == JsonToken.PROPERTY_NAME)
&& str.getValue().equals(currentName());
}
@Override
public int nextNameMatch(PropertyNameMatcher matcher) throws JacksonException {
// NOTE: call `nextToken()` to handle delegation
String str = nextName();
if (str != null) {
// 15-Nov-2017, tatu: Cannot assume intern()ing aspects when delegating...
return matcher.matchName(str);
}
if (hasToken(JsonToken.END_OBJECT)) {
return PropertyNameMatcher.MATCH_END_OBJECT;
}
return PropertyNameMatcher.MATCH_ODD_TOKEN;
}
/*
/*******************************************************
/* Additional extended API
/*******************************************************
*/
/**
* Method that is most useful for debugging or testing;
* returns actual number of underlying parsers sequence
* was constructed with (nor just ones remaining active)
*
* @return Number of actual underlying parsers this sequence has
*/
public int containedParsersCount() {
return _parsers.length;
}
/*
/*******************************************************
/* Helper methods
/*******************************************************
*/
/**
* Method that will switch active delegate parser from the current one
* to the next parser in sequence, if there is another parser left:
* if so, the next parser will become the active delegate parser.
*
* @return True if switch succeeded; false otherwise
*/
protected boolean switchToNext()
{
if (_nextParserIndex < _parsers.length) {
delegate = _parsers[_nextParserIndex++];
return true;
}
return false;
}
protected JsonToken switchAndReturnNext() throws JacksonException
{
while (_nextParserIndex < _parsers.length) {
delegate = _parsers[_nextParserIndex++];
if (_checkForExistingToken && delegate.hasCurrentToken()) {
return delegate.currentToken();
}
JsonToken t = delegate.nextToken();
if (t != null) {
return t;
}
}
return null;
}
}
| JsonParserSequence |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/hierarchies/standard/ClassHierarchyWithMergedConfigLevelTwoTests.java | {
"start": 1543,
"end": 1672
} | class ____ extends ClassHierarchyWithMergedConfigLevelOneTests {
@Configuration
static | ClassHierarchyWithMergedConfigLevelTwoTests |
java | apache__hadoop | hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java | {
"start": 1081,
"end": 1214
} | class ____ getting build-info of the java-vm.
*
*/
@InterfaceAudience.LimitedPrivate({"HBase"})
@InterfaceStability.Unstable
public | for |
java | resilience4j__resilience4j | resilience4j-circuitbreaker/src/test/java/io/github/resilience4j/circuitbreaker/CircuitBreakerConfigTest.java | {
"start": 30950,
"end": 31034
} | class ____ extends ExtendsException {
}
private static | ExtendsExtendsException |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/OrderedClassTests.java | {
"start": 13956,
"end": 13998
} | class ____ {
@Nested
@Order(3)
| Inner |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/temp/TemporaryQueueRouteTest.java | {
"start": 1612,
"end": 2970
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new TransientCamelContextExtension();
protected final String endpointUri = "activemq:temp-queue:TemporaryQueueRouteTest";
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testSendMessage() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:result");
endpoint.expectedBodiesReceived("Hello World");
template.sendBody(endpointUri, "Hello World");
endpoint.assertIsSatisfied();
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(endpointUri).to("mock:result");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| TemporaryQueueRouteTest |
java | apache__hadoop | hadoop-tools/hadoop-resourceestimator/src/test/java/org/apache/hadoop/resourceestimator/common/serialization/TestResourceSkylineSerDe.java | {
"start": 1613,
"end": 4424
} | class ____ {
/**
* Testing variables.
*/
private Gson gson;
private ResourceSkyline resourceSkyline;
private Resource resource;
private Resource resource2;
private TreeMap<Long, Resource> resourceOverTime;
private RLESparseResourceAllocation skylineList;
@BeforeEach
public final void setup() {
resourceOverTime = new TreeMap<>();
skylineList = new RLESparseResourceAllocation(resourceOverTime,
new DefaultResourceCalculator());
resource = Resource.newInstance(1024 * 100, 100);
resource2 = Resource.newInstance(1024 * 200, 200);
gson = new GsonBuilder()
.registerTypeAdapter(Resource.class, new ResourceSerDe())
.registerTypeAdapter(RLESparseResourceAllocation.class,
new RLESparseResourceAllocationSerDe()).create();
}
@Test public final void testSerialization() {
ReservationInterval riAdd = new ReservationInterval(0, 10);
skylineList.addInterval(riAdd, resource);
riAdd = new ReservationInterval(10, 20);
skylineList.addInterval(riAdd, resource2);
resourceSkyline =
new ResourceSkyline("1", 1024.5, 0, 20, resource, skylineList);
final String json =
gson.toJson(resourceSkyline, new TypeToken<ResourceSkyline>() {
}.getType());
final ResourceSkyline resourceSkylineDe =
gson.fromJson(json, new TypeToken<ResourceSkyline>() {
}.getType());
assertEquals(resourceSkylineDe.getJobId(), resourceSkyline.getJobId());
assertEquals(resourceSkylineDe.getJobInputDataSize(),
resourceSkyline.getJobInputDataSize(), 0);
assertEquals(resourceSkylineDe.getJobSubmissionTime(),
resourceSkyline.getJobSubmissionTime());
assertEquals(resourceSkylineDe.getJobFinishTime(),
resourceSkyline.getJobFinishTime());
assertEquals(resourceSkylineDe.getContainerSpec().getMemorySize(),
resourceSkyline.getContainerSpec().getMemorySize());
assertEquals(resourceSkylineDe.getContainerSpec().getVirtualCores(),
resourceSkyline.getContainerSpec().getVirtualCores());
final RLESparseResourceAllocation skylineList2 =
resourceSkyline.getSkylineList();
final RLESparseResourceAllocation skylineListDe =
resourceSkylineDe.getSkylineList();
for (int i = 0; i < 20; i++) {
assertEquals(skylineList2.getCapacityAtTime(i).getMemorySize(),
skylineListDe.getCapacityAtTime(i).getMemorySize());
assertEquals(skylineList2.getCapacityAtTime(i).getVirtualCores(),
skylineListDe.getCapacityAtTime(i).getVirtualCores());
}
}
@AfterEach
public final void cleanUp() {
gson = null;
resourceSkyline = null;
resourceOverTime.clear();
resourceOverTime = null;
resource = null;
resource2 = null;
skylineList = null;
}
}
| TestResourceSkylineSerDe |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/caching/mocked/CacheAccessTypeTest.java | {
"start": 493,
"end": 2551
} | class ____ {
@Test
@JiraKey( value = "HHH-9844")
public void testExplicitExternalNames() {
assertSame( AccessType.READ_ONLY, AccessType.fromExternalName( "read-only" ) );
assertSame( AccessType.READ_WRITE, AccessType.fromExternalName( "read-write" ) );
assertSame( AccessType.NONSTRICT_READ_WRITE, AccessType.fromExternalName( "nonstrict-read-write" ) );
assertSame( AccessType.TRANSACTIONAL, AccessType.fromExternalName( "transactional" ) );
}
@Test
@JiraKey( value = "HHH-9844")
public void testEnumNames() {
assertSame( AccessType.READ_ONLY, AccessType.fromExternalName( "READ_ONLY" ) );
assertSame( AccessType.READ_WRITE, AccessType.fromExternalName( "READ_WRITE" ) );
assertSame( AccessType.NONSTRICT_READ_WRITE, AccessType.fromExternalName( "NONSTRICT_READ_WRITE" ) );
assertSame( AccessType.TRANSACTIONAL, AccessType.fromExternalName( "TRANSACTIONAL" ) );
}
@Test
@JiraKey( value = "HHH-9844")
public void testLowerCaseEnumNames() {
assertSame( AccessType.READ_ONLY, AccessType.fromExternalName( "read_only" ) );
assertSame( AccessType.READ_WRITE, AccessType.fromExternalName( "read_write" ) );
assertSame( AccessType.NONSTRICT_READ_WRITE, AccessType.fromExternalName( "nonstrict_read_write" ) );
assertSame( AccessType.TRANSACTIONAL, AccessType.fromExternalName( "transactional" ) );
}
@Test
@JiraKey( value = "HHH-9844")
public void testUpperCaseWithHyphens() {
try {
AccessType.fromExternalName( "READ-ONLY" );
fail( "should have failed because upper-case using hyphans is not supported." );
}
catch (UnknownAccessTypeException ex) {
// expected
}
try {
AccessType.fromExternalName( "READ-WRITE" );
fail( "should have failed because upper-case using hyphans is not supported." );
}
catch (UnknownAccessTypeException ex) {
// expected
}
try {
AccessType.fromExternalName( "NONSTRICT-READ-WRITE" );
fail( "should have failed because upper-case using hyphans is not supported." );
}
catch (UnknownAccessTypeException ex) {
// expected
}
}
}
| CacheAccessTypeTest |
java | micronaut-projects__micronaut-core | http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/filter/options/OptionsFilterTest.java | {
"start": 4949,
"end": 5326
} | class ____ {
@Get("/foo/{id}")
@Status(HttpStatus.OK)
public void fooGet(String id) {
}
@Post("/foo/{id}")
@Status(HttpStatus.CREATED)
public void fooPost(String id) {
}
@Options("/options/route")
@Status(HttpStatus.I_AM_A_TEAPOT)
public void optionsRoute() {
}
}
}
| MyController |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/inject/InjectableBeanDefinition.java | {
"start": 1073,
"end": 1952
} | interface ____<T> extends BeanDefinition<T> {
/**
* Inject the given bean with the context.
*
* @param context The context
* @param bean The bean
* @return The injected bean
*/
@NonNull
default T inject(@NonNull BeanContext context, @NonNull T bean) {
try (DefaultBeanResolutionContext resolutionContext = new DefaultBeanResolutionContext(context, this)) {
return inject(resolutionContext, context, bean);
}
}
/**
* Inject the given bean with the context.
*
* @param resolutionContext the resolution context
* @param context The context
* @param bean The bean
* @return The injected bean
*/
@NonNull
T inject(@NonNull BeanResolutionContext resolutionContext, @NonNull BeanContext context, @NonNull T bean);
}
| InjectableBeanDefinition |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExecutionConditionTests.java | {
"start": 6487,
"end": 7290
} | class ____ {
@Test
void enabledTest() {
}
@Test
@Disabled
@DeactivatedConditions
void atDisabledTest() {
fail("this should be @Disabled");
}
@Test
@SystemProperty(key = FOO, value = BAR)
void systemPropertyEnabledTest() {
}
@Test
@DeactivatedConditions
@SystemProperty(key = FOO, value = BOGUS)
void systemPropertyWithIncorrectValueTest() {
fail("this should be disabled");
}
@Test
@DeactivatedConditions
@SystemProperty(key = BOGUS, value = "doesn't matter")
void systemPropertyNotSetTest() {
fail("this should be disabled");
}
}
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@ExtendWith({ AlwaysDisabledCondition.class, AnotherAlwaysDisabledCondition.class })
@ | TestCaseWithExecutionConditionOnMethods |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/RobertaTokenizer.java | {
"start": 6604,
"end": 8631
} | class ____ {
protected final List<String> originalVocab;
protected final List<String> merges;
protected final SortedMap<String, Integer> vocab;
protected boolean withSpecialTokens;
protected boolean prefixSpace;
protected int maxSequenceLength;
protected Set<String> neverSplit;
protected Builder(List<String> vocab, List<String> merges, RobertaTokenization tokenization) {
this.originalVocab = vocab;
this.vocab = buildSortedVocab(vocab);
this.merges = merges;
this.prefixSpace = tokenization.isAddPrefixSpace();
this.withSpecialTokens = tokenization.withSpecialTokens();
this.maxSequenceLength = tokenization.maxSequenceLength();
}
private static SortedMap<String, Integer> buildSortedVocab(List<String> vocab) {
SortedMap<String, Integer> sortedVocab = new TreeMap<>();
for (int i = 0; i < vocab.size(); i++) {
sortedVocab.put(vocab.get(i), i);
}
return sortedVocab;
}
public Builder setNeverSplit(Set<String> neverSplit) {
this.neverSplit = neverSplit;
return this;
}
public Builder setMaxSequenceLength(int maxSequenceLength) {
this.maxSequenceLength = maxSequenceLength;
return this;
}
/**
* Include CLS and SEP tokens
* @param withSpecialTokens if true include CLS and SEP tokens
* @return this
*/
public Builder setWithSpecialTokens(boolean withSpecialTokens) {
this.withSpecialTokens = withSpecialTokens;
return this;
}
public RobertaTokenizer build() {
if (neverSplit == null) {
neverSplit = Collections.emptySet();
}
return new RobertaTokenizer(originalVocab, vocab, merges, prefixSpace, withSpecialTokens, maxSequenceLength, neverSplit);
}
}
}
| Builder |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/AssertNotNull.java | {
"start": 697,
"end": 1437
} | class ____ {
private AssertNotNull() {
/* no-op */
}
@Contract("null -> fail")
static void assertNotNull(@Nullable Object actual) {
assertNotNull(actual, (String) null);
}
@Contract("null, _ -> fail")
static void assertNotNull(@Nullable Object actual, @Nullable String message) {
if (actual == null) {
failNull(message);
}
}
@Contract("null, _ -> fail")
static void assertNotNull(@Nullable Object actual, Supplier<@Nullable String> messageSupplier) {
if (actual == null) {
failNull(messageSupplier);
}
}
private static void failNull(@Nullable Object messageOrSupplier) {
assertionFailure() //
.message(messageOrSupplier) //
.reason("expected: not <null>") //
.buildAndThrow();
}
}
| AssertNotNull |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoEmptyTest.java | {
"start": 844,
"end": 1139
} | class ____ {
@Test
public void normal() {
StepVerifier.create(Mono.empty())
.verifyComplete();
}
@Test
public void scanOperator(){
MonoEmpty test = new MonoEmpty();
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
}
| MonoEmptyTest |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/util/reflection/BeanPropertySetterTest.java | {
"start": 4352,
"end": 4509
} | class ____ {
private File theField;
public File getTheField() {
return theField;
}
}
static | SomeBeanWithJustAGetter |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/DiscoverySelector.java | {
"start": 929,
"end": 1632
} | interface ____ {
/**
* Return the {@linkplain DiscoverySelectorIdentifier identifier} of this
* selector.
*
* <p>The returned identifier must be parsable by a corresponding
* {@link DiscoverySelectorIdentifierParser}.
*
* <p>The default implementation returns {@link Optional#empty()}. Can be
* overridden by concrete implementations.
*
* @return an {@link Optional} containing the identifier of this selector;
* never {@code null} but potentially empty if the selector does not support
* identifiers
* @since 1.11
*/
@API(status = MAINTAINED, since = "1.13.3")
default Optional<DiscoverySelectorIdentifier> toIdentifier() {
return Optional.empty();
}
}
| DiscoverySelector |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java | {
"start": 8914,
"end": 47047
} | class ____ extends TransportTasksAction<Task, TestTasksRequest, TestTasksResponse, TestTaskResponse> {
protected TestTasksAction(String actionName, ClusterService clusterService, TransportService transportService) {
super(
actionName,
clusterService,
transportService,
new ActionFilters(new HashSet<>()),
TestTasksRequest::new,
TestTaskResponse::new,
transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT)
);
}
@Override
protected TestTasksResponse newResponse(
TestTasksRequest request,
List<TestTaskResponse> tasks,
List<TaskOperationFailure> taskOperationFailures,
List<FailedNodeException> failedNodeExceptions
) {
return new TestTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
}
}
private ActionFuture<NodesResponse> startBlockingTestNodesAction(CountDownLatch checkLatch) throws Exception {
return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request"));
}
private ActionFuture<NodesResponse> startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws Exception {
PlainActionFuture<NodesResponse> future = new PlainActionFuture<>();
startBlockingTestNodesAction(checkLatch, request, future);
return future;
}
private Task startBlockingTestNodesAction(CountDownLatch checkLatch, ActionListener<NodesResponse> listener) throws Exception {
return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request"), listener);
}
private Task startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request, ActionListener<NodesResponse> listener)
throws Exception {
CountDownLatch actionLatch = new CountDownLatch(nodesCount);
TestNodesAction[] actions = new TestNodesAction[nodesCount];
for (int i = 0; i < testNodes.length; i++) {
final int node = i;
actions[i] = new TestNodesAction(
"internal:testAction",
threadPool,
testNodes[i].clusterService,
testNodes[i].transportService
) {
@Override
protected NodeResponse nodeOperation(NodeRequest request, Task task) {
logger.info("Action on node {}", node);
actionLatch.countDown();
try {
checkLatch.await();
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
logger.info("Action on node {} finished", node);
return new NodeResponse(testNodes[node].discoveryNode());
}
};
}
// Make sure no tasks are running
for (TestNode node : testNodes) {
assertBusy(() -> assertEquals(0, node.transportService.getTaskManager().getTasks().size()));
}
Task task = testNodes[0].transportService.getTaskManager()
.registerAndExecute("transport", actions[0], request, testNodes[0].transportService.getLocalNodeConnection(), listener);
logger.info("Awaiting for all actions to start");
assertTrue(actionLatch.await(10, TimeUnit.SECONDS));
logger.info("Done waiting for all actions to start");
return task;
}
public void testRunningTasksCount() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
CountDownLatch responseLatch = new CountDownLatch(1);
final AtomicReference<NodesResponse> responseReference = new AtomicReference<>();
Task mainTask = startBlockingTestNodesAction(checkLatch, new ActionListener<>() {
@Override
public void onResponse(NodesResponse listTasksResponse) {
responseReference.set(listTasksResponse);
responseLatch.countDown();
}
@Override
public void onFailure(Exception e) {
logger.warn("Couldn't get list of tasks", e);
responseLatch.countDown();
}
});
// Check task counts using taskManager
Map<Long, Task> localTasks = testNodes[0].transportService.getTaskManager().getTasks();
logger.info(
"local tasks [{}]",
localTasks.values()
.stream()
.map(t -> Strings.toString(t.taskInfo(testNodes[0].getNodeId(), true)))
.collect(Collectors.joining(","))
);
assertEquals(2, localTasks.size()); // all node tasks + 1 coordinating task
Task coordinatingTask = localTasks.get(Collections.min(localTasks.keySet()));
Task subTask = localTasks.get(Collections.max(localTasks.keySet()));
assertThat(subTask.getAction(), endsWith("[n]"));
assertThat(coordinatingTask.getAction(), not(endsWith("[n]")));
for (int i = 1; i < testNodes.length; i++) {
Map<Long, Task> remoteTasks = testNodes[i].transportService.getTaskManager().getTasks();
assertEquals(1, remoteTasks.size());
Task remoteTask = remoteTasks.values().iterator().next();
assertThat(remoteTask.getAction(), endsWith("[n]"));
}
// Check task counts using transport
int testNodeNum = randomIntBetween(0, testNodes.length - 1);
TestNode testNode = testNodes[testNodeNum];
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("internal:testAction*"); // pick all test actions
logger.info("Listing currently running tasks using node [{}]", testNodeNum);
ListTasksResponse response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
logger.info("Checking currently running tasks");
assertEquals(testNodes.length, response.getPerNodeTasks().size());
// Coordinating node
assertEquals(2, response.getPerNodeTasks().get(testNodes[0].getNodeId()).size());
// Other nodes node
for (int i = 1; i < testNodes.length; i++) {
assertEquals(1, response.getPerNodeTasks().get(testNodes[i].getNodeId()).size());
}
// There should be a single main task when grouped by tasks
assertEquals(1, response.getTaskGroups().size());
// And as many child tasks as we have nodes
assertEquals(testNodes.length, response.getTaskGroups().get(0).childTasks().size());
// Check task counts using transport with filtering
testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("internal:testAction[n]"); // only pick node actions
response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertNull(entry.getValue().get(0).description());
}
// Since the main task is not in the list - all tasks should be by themselves
assertEquals(testNodes.length, response.getTaskGroups().size());
for (TaskGroup taskGroup : response.getTaskGroups()) {
assertEquals(0, taskGroup.childTasks().size());
}
// Check task counts using transport with detailed description
listTasksRequest.setDetailed(true); // same request only with detailed description
response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertEquals("CancellableNodeRequest[Test Request]", entry.getValue().get(0).description());
}
// Make sure that the main task on coordinating node is the task that was returned to us by execute()
listTasksRequest.setActions("internal:testAction"); // only pick the main task
response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(1, response.getTasks().size());
assertEquals(mainTask.getId(), response.getTasks().get(0).id());
// Release all tasks and wait for response
checkLatch.countDown();
assertTrue(responseLatch.await(10, TimeUnit.SECONDS));
NodesResponse responses = responseReference.get();
assertEquals(0, responses.failureCount());
// Make sure that we don't have any lingering tasks
for (TestNode node : testNodes) {
assertBusy(() -> assertEquals(0, node.transportService.getTaskManager().getTasks().size()));
}
}
public void testFindChildTasks() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
// Get the parent task
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("internal:testAction");
ListTasksResponse response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(1, response.getTasks().size());
String parentNode = response.getTasks().get(0).taskId().getNodeId();
long parentTaskId = response.getTasks().get(0).id();
// Find tasks with common parent
listTasksRequest = new ListTasksRequest();
listTasksRequest.setTargetParentTaskId(new TaskId(parentNode, parentTaskId));
response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(testNodes.length, response.getTasks().size());
for (TaskInfo task : response.getTasks()) {
assertEquals("internal:testAction[n]", task.action());
assertEquals(parentNode, task.parentTaskId().getNodeId());
assertEquals(parentTaskId, task.parentTaskId().getId());
}
// Release all tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
public void testTasksDescriptions() throws Exception {
long minimalStartTime = System.currentTimeMillis();
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
long maximumStartTimeNanos = System.nanoTime();
// Check task counts using transport with filtering
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("internal:testAction[n]"); // only pick node actions
ListTasksResponse response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertNull(entry.getValue().get(0).description());
}
// Check task counts using transport with detailed description
long minimalDurationNanos = System.nanoTime() - maximumStartTimeNanos;
listTasksRequest.setDetailed(true); // same request only with detailed description
response = ActionTestUtils.executeBlocking(testNode.transportListTasksAction, listTasksRequest);
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertEquals("CancellableNodeRequest[Test Request]", entry.getValue().get(0).description());
assertThat(entry.getValue().get(0).startTime(), greaterThanOrEqualTo(minimalStartTime));
assertThat(entry.getValue().get(0).runningTimeNanos(), greaterThanOrEqualTo(minimalDurationNanos));
}
// Release all tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
public void testCancellingTasksThatDontSupportCancellation() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
CountDownLatch responseLatch = new CountDownLatch(1);
Task task = startBlockingTestNodesAction(checkLatch, ActionListener.running(responseLatch::countDown));
String actionName = "internal:testAction"; // only pick the main action
// Try to cancel main task using action name
CancelTasksRequest request = new CancelTasksRequest();
request.setNodes(testNodes[0].getNodeId());
request.setReason("Testing Cancellation");
request.setActions(actionName);
ListTasksResponse response = ActionTestUtils.executeBlocking(
testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction,
request
);
// Shouldn't match any tasks since testAction doesn't support cancellation
assertEquals(0, response.getTasks().size());
assertEquals(0, response.getTaskFailures().size());
assertEquals(0, response.getNodeFailures().size());
// Try to cancel main task using id
request = new CancelTasksRequest();
request.setReason("Testing Cancellation");
request.setTargetTaskId(new TaskId(testNodes[0].getNodeId(), task.getId()));
response = ActionTestUtils.executeBlocking(
testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction,
request
);
// Shouldn't match any tasks since testAction doesn't support cancellation
assertEquals(0, response.getTasks().size());
assertEquals(0, response.getTaskFailures().size());
assertEquals(1, response.getNodeFailures().size());
assertThat(response.getNodeFailures().get(0).getDetailedMessage(), containsString("doesn't support cancellation"));
// Make sure that task is still running
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions(actionName);
ListTasksResponse listResponse = ActionTestUtils.executeBlocking(
testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction,
listTasksRequest
);
assertEquals(1, listResponse.getPerNodeTasks().size());
// Verify that tasks are marked as non-cancellable
for (TaskInfo taskInfo : listResponse.getTasks()) {
assertFalse(taskInfo.cancellable());
}
// Release all tasks and wait for response
checkLatch.countDown();
responseLatch.await(10, TimeUnit.SECONDS);
}
public void testFailedTasksCount() throws Exception {
Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build();
setupTestNodes(settings);
connectNodes(testNodes);
TestNodesAction[] actions = new TestNodesAction[nodesCount];
RecordingTaskManagerListener[] listeners = setupListeners(testNodes, "internal:testAction*");
for (int i = 0; i < testNodes.length; i++) {
final int node = i;
actions[i] = new TestNodesAction(
"internal:testAction",
threadPool,
testNodes[i].clusterService,
testNodes[i].transportService
) {
@Override
protected NodeResponse nodeOperation(NodeRequest request, Task task) {
TransportTasksActionTests.this.logger.info("Action on node {}", node);
throw new RuntimeException("Test exception");
}
};
}
// Since https://github.com/elastic/elasticsearch/pull/94865 task unregistration is not guaranteed to have happened upon
// receiving the response, e.g. for a `internal:transport/handshake` when connecting the test nodes. Therefore, wait
// for ongoing tasks to finish.
assertBusy(() -> {
final List<String> ongoingTaskDescriptions = getAllTaskDescriptions();
assertThat("Ongoing tasks:" + ongoingTaskDescriptions, ongoingTaskDescriptions.size(), equalTo(0));
});
NodesRequest request = new NodesRequest("Test Request");
NodesResponse responses = ActionTestUtils.executeBlockingWithTask(
testNodes[0].transportService.getTaskManager(),
testNodes[0].transportService.getLocalNodeConnection(),
actions[0],
request
);
assertEquals(nodesCount, responses.failureCount());
// Make sure that actions are still registered in the task manager on all nodes
// Twice on the coordinating node and once on all other nodes.
assertBusy(() -> {
assertEquals(2, listeners[0].getRegistrationEvents().size());
assertEquals(2, listeners[0].getUnregistrationEvents().size());
for (int i = 1; i < listeners.length; i++) {
assertEquals(1, listeners[i].getRegistrationEvents().size());
assertEquals(1, listeners[i].getUnregistrationEvents().size());
}
});
}
private List<String> getAllTaskDescriptions() {
List<String> taskDescriptions = new ArrayList<>();
for (TestNode testNode : testNodes) {
for (Task task : testNode.transportService.getTaskManager().getTasks().values()) {
taskDescriptions.add(
Strings.format(
"node [%s]: task [id:%d][%s] started at %d",
testNode.getNodeId(),
task.getId(),
task.getAction(),
task.getStartTime()
)
);
}
}
return taskDescriptions;
}
public void testActionParentCancellationPropagates() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
CountDownLatch taskLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
int numNodes = 2;
CountDownLatch taskExecutesLatch = new CountDownLatch(numNodes);
TestTasksAction[] tasksActions = new TestTasksAction[numNodes];
for (int j = 0; j < numNodes; j++) {
final int nodeId = j;
tasksActions[j] = new TestTasksAction(
"internal:testTasksAction",
testNodes[nodeId].clusterService,
testNodes[nodeId].transportService
) {
@Override
protected void taskOperation(
CancellableTask actionTask,
TestTasksRequest request,
Task task,
ActionListener<TestTaskResponse> listener
) {
try {
taskExecutesLatch.countDown();
logger.info("Task handled on node {} {}", nodeId, actionTask);
taskLatch.await();
assertThat(actionTask, instanceOf(CancellableTask.class));
logger.info("Task is now proceeding with cancellation check {}", nodeId);
assertBusy(() -> assertTrue(actionTask.isCancelled()));
listener.onResponse(new TestTaskResponse("CANCELLED"));
} catch (Exception e) {
listener.onFailure(e);
fail(e.getMessage());
}
}
};
}
TestTasksRequest testTasksRequest = new TestTasksRequest();
testTasksRequest.setActions("internal:testAction[n]"); // pick all test actions
testTasksRequest.setNodes(testNodes[0].getNodeId(), testNodes[1].getNodeId()); // only first two nodes
PlainActionFuture<TestTasksResponse> taskFuture = new PlainActionFuture<>();
CancellableTask task = (CancellableTask) testNodes[0].transportService.getTaskManager()
.registerAndExecute(
"direct",
tasksActions[0],
testTasksRequest,
testNodes[0].transportService.getLocalNodeConnection(),
taskFuture
);
logger.info("Executing test task request and awaiting their execution");
taskExecutesLatch.await();
logger.info("All test tasks are now executing");
PlainActionFuture<Void> cancellationFuture = new PlainActionFuture<>();
logger.info("Cancelling tasks");
testNodes[0].transportService.getTaskManager().cancelTaskAndDescendants(task, "test case", false, cancellationFuture);
logger.info("Awaiting task cancellation");
cancellationFuture.actionGet();
logger.info("Parent task is now cancelled counting down task latch");
taskLatch.countDown();
expectThrows(TaskCancelledException.class, taskFuture);
// Release all node tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
public void testTaskResponsesDiscardedOnCancellation() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch blockedActionLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(blockedActionLatch);
final var taskResponseListeners = new LinkedBlockingQueue<ActionListener<TestTaskResponse>>();
final var taskResponseListenersCountDown = new CountDownLatch(2); // test action plus the list[n] action
final TestTasksAction tasksAction = new TestTasksAction(
"internal:testTasksAction",
testNodes[0].clusterService,
testNodes[0].transportService
) {
@Override
protected void taskOperation(
CancellableTask actionTask,
TestTasksRequest request,
Task task,
ActionListener<TestTaskResponse> listener
) {
taskResponseListeners.add(listener);
taskResponseListenersCountDown.countDown();
}
};
TestTasksRequest testTasksRequest = new TestTasksRequest();
testTasksRequest.setNodes(testNodes[0].getNodeId()); // only local node
PlainActionFuture<TestTasksResponse> taskFuture = new PlainActionFuture<>();
CancellableTask task = (CancellableTask) testNodes[0].transportService.getTaskManager()
.registerAndExecute(
"direct",
tasksAction,
testTasksRequest,
testNodes[0].transportService.getLocalNodeConnection(),
taskFuture
);
safeAwait(taskResponseListenersCountDown);
final var reachabilityChecker = new ReachabilityChecker();
final var listener0 = Objects.requireNonNull(taskResponseListeners.poll());
if (randomBoolean()) {
listener0.onResponse(reachabilityChecker.register(new TestTaskResponse("status")));
} else {
listener0.onFailure(reachabilityChecker.register(new ElasticsearchException("simulated")));
}
reachabilityChecker.checkReachable();
safeAwait(
(ActionListener<Void> l) -> testNodes[0].transportService.getTaskManager().cancelTaskAndDescendants(task, "test", false, l)
);
reachabilityChecker.ensureUnreachable();
while (true) {
final var listener = taskResponseListeners.poll();
if (listener == null) {
break;
}
if (randomBoolean()) {
listener.onResponse(reachabilityChecker.register(new TestTaskResponse("status")));
} else {
listener.onFailure(reachabilityChecker.register(new ElasticsearchException("simulated")));
}
reachabilityChecker.ensureUnreachable();
}
expectThrows(TaskCancelledException.class, taskFuture);
blockedActionLatch.countDown();
NodesResponse responses = future.get(10, TimeUnit.SECONDS);
assertEquals(0, responses.failureCount());
}
public void testNodeResponsesDiscardedOnCancellation() {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
final var taskResponseListeners = new AtomicReferenceArray<ActionListener<TestTaskResponse>>(testNodes.length);
final var taskResponseListenersCountDown = new CountDownLatch(testNodes.length); // one list[n] action per node
final var tasksActions = new TestTasksAction[testNodes.length];
for (int i = 0; i < testNodes.length; i++) {
final var nodeIndex = i;
tasksActions[i] = new TestTasksAction("internal:testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) {
@Override
protected void taskOperation(
CancellableTask actionTask,
TestTasksRequest request,
Task task,
ActionListener<TestTaskResponse> listener
) {
assertThat(taskResponseListeners.getAndSet(nodeIndex, ActionListener.notifyOnce(listener)), nullValue());
taskResponseListenersCountDown.countDown();
}
};
}
TestTasksRequest testTasksRequest = new TestTasksRequest();
testTasksRequest.setActions("internal:testTasksAction[n]");
PlainActionFuture<TestTasksResponse> taskFuture = new PlainActionFuture<>();
CancellableTask task = (CancellableTask) testNodes[0].transportService.getTaskManager()
.registerAndExecute(
"direct",
tasksActions[0],
testTasksRequest,
testNodes[0].transportService.getLocalNodeConnection(),
taskFuture
);
safeAwait(taskResponseListenersCountDown);
final var reachabilityChecker = new ReachabilityChecker();
if (randomBoolean()) {
// local node does not de/serialize node-level response so retains references to the task-level response
if (randomBoolean()) {
taskResponseListeners.get(0).onResponse(reachabilityChecker.register(new TestTaskResponse("status")));
} else {
taskResponseListeners.get(0).onFailure(reachabilityChecker.register(new ElasticsearchException("simulated")));
}
reachabilityChecker.checkReachable();
}
safeAwait(
(ActionListener<Void> l) -> testNodes[0].transportService.getTaskManager().cancelTaskAndDescendants(task, "test", false, l)
);
reachabilityChecker.ensureUnreachable();
assertFalse(taskFuture.isDone());
for (int i = 0; i < testNodes.length; i++) {
if (randomBoolean()) {
taskResponseListeners.get(i).onResponse(reachabilityChecker.register(new TestTaskResponse("status")));
} else {
taskResponseListeners.get(i).onFailure(reachabilityChecker.register(new ElasticsearchException("simulated")));
}
reachabilityChecker.ensureUnreachable();
}
expectThrows(TaskCancelledException.class, taskFuture);
}
public void testTaskLevelActionFailures() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
TestTasksAction[] tasksActions = new TestTasksAction[nodesCount];
final int failTaskOnNode = randomIntBetween(1, nodesCount - 1);
for (int i = 0; i < testNodes.length; i++) {
final int node = i;
// Simulate task action that fails on one of the tasks on one of the nodes
tasksActions[i] = new TestTasksAction("internal:testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) {
@Override
protected void taskOperation(
CancellableTask actionTask,
TestTasksRequest request,
Task task,
ActionListener<TestTaskResponse> listener
) {
logger.info("Task action on node {}", node);
if (failTaskOnNode == node && task.getParentTaskId().isSet()) {
logger.info("Failing on node {}", node);
// Fail in a random way to make sure we can handle all these ways
Runnable failureMode = randomFrom(() -> {
logger.info("Throwing exception from taskOperation");
throw new RuntimeException("Task level failure (direct)");
}, () -> {
logger.info("Calling listener synchronously with exception from taskOperation");
listener.onFailure(new RuntimeException("Task level failure (sync listener)"));
}, () -> {
logger.info("Calling listener asynchronously with exception from taskOperation");
threadPool.generic()
.execute(() -> listener.onFailure(new RuntimeException("Task level failure (async listener)")));
});
failureMode.run();
} else {
if (randomBoolean()) {
listener.onResponse(new TestTaskResponse("Success on node (sync)" + node));
} else {
threadPool.generic().execute(() -> listener.onResponse(new TestTaskResponse("Success on node (async)" + node)));
}
}
}
};
}
// Run task action on node tasks that are currently running
// should be successful on all nodes except one
TestTasksRequest testTasksRequest = new TestTasksRequest();
testTasksRequest.setActions("internal:testAction[n]"); // pick all test actions
TestTasksResponse response = ActionTestUtils.executeBlocking(tasksActions[0], testTasksRequest);
assertThat(response.getTaskFailures(), hasSize(1)); // one task failed
assertThat(response.getTaskFailures().get(0).getCause().getMessage(), containsString("Task level failure"));
// Get successful responses from all nodes except one
assertEquals(testNodes.length - 1, response.tasks.size());
assertEquals(0, response.getNodeFailures().size()); // no nodes failed
// Release all node tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
@SuppressWarnings("unchecked")
public void testTasksToXContentGrouping() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
// Get the parent task
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions(TransportListTasksAction.TYPE.name() + "*");
ListTasksResponse response = ActionTestUtils.executeBlockingWithTask(
testNodes[0].transportService.getTaskManager(),
testNodes[0].transportService.getLocalNodeConnection(),
testNodes[0].transportListTasksAction,
listTasksRequest
);
assertEquals(testNodes.length + 1, response.getTasks().size());
Map<String, Object> byNodes = serialize(response, true);
byNodes = (Map<String, Object>) byNodes.get("nodes");
// One element on the top level
assertEquals(testNodes.length, byNodes.size());
Map<String, Object> firstNode = (Map<String, Object>) byNodes.get(testNodes[0].getNodeId());
firstNode = (Map<String, Object>) firstNode.get("tasks");
assertEquals(2, firstNode.size()); // two tasks for the first node
for (int i = 1; i < testNodes.length; i++) {
Map<String, Object> otherNode = (Map<String, Object>) byNodes.get(testNodes[i].getNodeId());
otherNode = (Map<String, Object>) otherNode.get("tasks");
assertEquals(1, otherNode.size()); // one tasks for the all other nodes
}
// Group by parents
Map<String, Object> byParent = serialize(response, false);
byParent = (Map<String, Object>) byParent.get("tasks");
// One element on the top level
assertEquals(1, byParent.size()); // Only one top level task
Map<String, Object> topTask = (Map<String, Object>) byParent.values().iterator().next();
List<Object> children = (List<Object>) topTask.get("children");
assertEquals(testNodes.length, children.size()); // two tasks for the first node
for (int i = 0; i < testNodes.length; i++) {
Map<String, Object> child = (Map<String, Object>) children.get(i);
assertNull(child.get("children"));
}
}
private Map<String, Object> serialize(ListTasksResponse response, boolean byParents) throws IOException {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
if (byParents) {
DiscoveryNodes nodes = testNodes[0].clusterService.state().nodes();
ChunkedToXContent.wrapAsToXContent(response.groupedByNode(() -> nodes)).toXContent(builder, ToXContent.EMPTY_PARAMS);
} else {
ChunkedToXContent.wrapAsToXContent(response.groupedByParent()).toXContent(builder, ToXContent.EMPTY_PARAMS);
}
builder.flush();
logger.info(Strings.toString(builder));
return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
}
public void testRefCounting() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
final var firstNodeListeners = new SubscribableListener<TestTaskResponse>();
final var otherNodeListeners = new SubscribableListener<TestTaskResponse>();
TestTasksAction[] tasksActions = new TestTasksAction[nodesCount];
for (int nodeId = 0; nodeId < nodesCount; nodeId++) {
final var listeners = nodeId == 0 ? firstNodeListeners : otherNodeListeners;
tasksActions[nodeId] = new TestTasksAction(
"internal:testTasksAction",
testNodes[nodeId].clusterService,
testNodes[nodeId].transportService
) {
@Override
protected void taskOperation(
CancellableTask actionTask,
TestTasksRequest request,
Task task,
ActionListener<TestTaskResponse> listener
) {
request.incRef();
listeners.addListener(ActionListener.runBefore(listener, request::decRef));
}
};
}
final var requestReleaseFuture = new PlainActionFuture<Void>();
TestTasksRequest testTasksRequest = new TestTasksRequest(() -> requestReleaseFuture.onResponse(null));
testTasksRequest.setActions("internal:testAction[n]"); // pick all test actions
testTasksRequest.setNodes(testNodes[0].getNodeId(), testNodes[1].getNodeId()); // only first two nodes
final var taskFuture = new PlainActionFuture<TestTasksResponse>();
testNodes[0].transportService.getTaskManager()
.registerAndExecute(
"direct",
tasksActions[0],
testTasksRequest,
testNodes[0].transportService.getLocalNodeConnection(),
taskFuture
);
testTasksRequest.decRef();
assertTrue(testTasksRequest.hasReferences());
firstNodeListeners.onResponse(new TestTaskResponse("done"));
assertNull(requestReleaseFuture.get(10, TimeUnit.SECONDS));
assertFalse(testTasksRequest.hasReferences());
assertFalse(taskFuture.isDone());
otherNodeListeners.onResponse(new TestTaskResponse("done"));
taskFuture.get(10, TimeUnit.SECONDS);
// Release all node tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
}
| TestTasksAction |
java | apache__camel | components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/Calculator.java | {
"start": 237077,
"end": 237359
} | class ____ implements org.apache.thrift.scheme.SchemeFactory {
@Override
public alltypes_argsStandardScheme getScheme() {
return new alltypes_argsStandardScheme();
}
}
private static | alltypes_argsStandardSchemeFactory |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/BreakableService.java | {
"start": 1279,
"end": 3259
} | class ____ extends AbstractService {
private boolean failOnInit;
private boolean failOnStart;
private boolean failOnStop;
private int[] counts = new int[4];
public BreakableService() {
this(false, false, false);
}
public BreakableService(boolean failOnInit,
boolean failOnStart,
boolean failOnStop) {
super("BreakableService");
this.failOnInit = failOnInit;
this.failOnStart = failOnStart;
this.failOnStop = failOnStop;
inc(STATE.NOTINITED);
}
private int convert(STATE state) {
return state.getValue();
}
private void inc(STATE state) {
int index = convert(state);
counts[index] ++;
}
public int getCount(STATE state) {
return counts[convert(state)];
}
private void maybeFail(boolean fail, String action) throws Exception {
if (fail) {
throw createFailureException(action);
}
}
/**
* Override point: create the exception to raise
* @param action action in progress
* @return the exception that will be thrown
*/
protected Exception createFailureException(String action) {
return new BrokenLifecycleEvent(this, action);
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
inc(STATE.INITED);
maybeFail(failOnInit, "init");
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
inc(STATE.STARTED);
maybeFail(failOnStart, "start");
}
@Override
protected void serviceStop() throws Exception {
inc(STATE.STOPPED);
maybeFail(failOnStop, "stop");
}
public void setFailOnInit(boolean failOnInit) {
this.failOnInit = failOnInit;
}
public void setFailOnStart(boolean failOnStart) {
this.failOnStart = failOnStart;
}
public void setFailOnStop(boolean failOnStop) {
this.failOnStop = failOnStop;
}
/**
* The exception explicitly raised on a failure.
*/
public static | BreakableService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/ColumnsAndFormulasSourceContainer.java | {
"start": 263,
"end": 396
} | interface ____ {
RelationalValueSourceHelper.ColumnsAndFormulasSource getColumnsAndFormulasSource();
}
| ColumnsAndFormulasSourceContainer |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/ssl/SslServerWithP12WithSniMatchingSanDNSTest.java | {
"start": 1377,
"end": 3039
} | class ____ {
@TestHTTPResource(value = "/ssl", ssl = true)
URL url;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyBean.class)
.addAsResource(new File("target/certs/ssl-test-sni-keystore.p12"), "server-keystore.pkcs12"))
.overrideConfigKey("quarkus.http.ssl.certificate.key-store-file", "server-keystore.pkcs12")
.overrideConfigKey("quarkus.http.ssl.certificate.key-store-password", "secret")
.overrideConfigKey("quarkus.http.ssl.certificate.key-store-alias-password", "secret")
.overrideConfigKey("quarkus.http.ssl.sni", "true");
@Inject
Vertx vertx;
@Test
public void testSslServerWithPkcs12() {
// Cannot use RESTAssured as it does not validate the certificate names (even when forced.)
WebClientOptions options = new WebClientOptions()
.setSsl(true)
.setTrustOptions(new io.vertx.core.net.JksOptions()
.setPath("target/certs/ssl-test-sni-truststore.jks")
.setPassword("secret"))
.setForceSni(true);
WebClient client = WebClient.create(vertx, options);
HttpResponse<Buffer> response = client.getAbs(url.toExternalForm()).send().toCompletionStage().toCompletableFuture()
.join();
Assertions.assertThat(response.statusCode()).isEqualTo(200);
Assertions.assertThat(response.bodyAsString()).isEqualTo("ssl");
}
@ApplicationScoped
static | SslServerWithP12WithSniMatchingSanDNSTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealmAuthenticateTests.java | {
"start": 1824,
"end": 39525
} | class ____ extends JwtRealmTestCase {
/**
* Test with empty roles.
* @throws Exception Unexpected test failure
*/
public void testJwtAuthcRealmAuthcAuthzWithEmptyRoles() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
randomIntBetween(1, 1), // realmsRange
randomIntBetween(0, 1), // authzRange
randomIntBetween(1, JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.size()), // algsRange
randomIntBetween(1, 3), // audiencesRange
randomIntBetween(1, 3), // usersRange
randomIntBetween(0, 0), // rolesRange
randomIntBetween(0, 1), // jwtCacheSizeRange
randomBoolean(), // createHttpsServer
false // jwkSetReloadEnabled
);
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwt = randomJwt(jwtIssuerAndRealm, user);
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
final int jwtAuthcCount = randomIntBetween(2, 3);
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwt, clientSecret, jwtAuthcCount);
}
public void testJwtCache() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(1, 1, 1, 1, 1, 1, 99, false, false);
JwtRealm realm = jwtIssuerAndRealms.get(0).realm();
realm.expireAll();
assertThat(realm.getJwtCache().count(), is(0));
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
for (int i = 1; i <= randomIntBetween(2, 10); i++) {
User user = randomUser(jwtIssuerAndRealm.issuer());
doMultipleAuthcAuthzAndVerifySuccess(
jwtIssuerAndRealm.realm(),
user,
randomJwt(jwtIssuerAndRealm, user),
clientSecret,
randomIntBetween(2, 10)
);
assertThat(realm.getJwtCache().count(), is(i));
}
}
/**
* Test with no authz realms.
* @throws Exception Unexpected test failure
*/
public void testJwtAuthcRealmAuthcAuthzWithoutAuthzRealms() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
randomIntBetween(1, 3), // realmsRange
randomIntBetween(0, 0), // authzRange
randomIntBetween(1, JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.size()), // algsRange
randomIntBetween(1, 3), // audiencesRange
randomIntBetween(1, 3), // usersRange
randomIntBetween(0, 3), // rolesRange
randomIntBetween(0, 1), // jwtCacheSizeRange
randomBoolean(), // createHttpsServer
false // jwkSetReloadEnabled
);
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
assertThat(jwtIssuerAndRealm.realm().delegatedAuthorizationSupport.hasDelegation(), is(false));
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwt = randomJwt(jwtIssuerAndRealm, user);
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
final int jwtAuthcCount = randomIntBetween(2, 3);
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwt, clientSecret, jwtAuthcCount);
}
/**
* Test with updated/removed/restored JWKs.
* @throws Exception Unexpected test failure
*/
public void testJwkSetUpdates() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
randomIntBetween(1, 3), // realmsRange
randomIntBetween(0, 0), // authzRange
randomIntBetween(1, JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.size()), // algsRange
randomIntBetween(1, 3), // audiencesRange
randomIntBetween(1, 3), // usersRange
randomIntBetween(0, 3), // rolesRange
randomIntBetween(0, 1), // jwtCacheSizeRange
randomBoolean(), // createHttpsServer
false // jwkSetReloadEnabled
);
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
assertThat(jwtIssuerAndRealm.realm().delegatedAuthorizationSupport.hasDelegation(), is(false));
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwtJwks1 = randomJwt(jwtIssuerAndRealm, user);
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
final int jwtAuthcCount = randomIntBetween(2, 3);
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
// Details about first JWT using the JWT issuer original JWKs
final String jwt1JwksAlg = SignedJWT.parse(jwtJwks1.toString()).getHeader().getAlgorithm().getName();
final boolean isPkcJwtJwks1 = JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS_PKC.contains(jwt1JwksAlg);
logger.debug("JWT alg=[{}]", jwt1JwksAlg);
// Backup JWKs 1
final List<JwtIssuer.AlgJwkPair> jwtIssuerJwks1Backup = jwtIssuerAndRealm.issuer().algAndJwksAll;
final boolean jwtIssuerJwks1OidcSafe = JwkValidateUtilTests.areJwkHmacOidcSafe(
jwtIssuerJwks1Backup.stream().map(e -> e.jwk()).toList()
);
logger.debug("JWKs 1, algs=[{}]", String.join(",", jwtIssuerAndRealm.issuer().algorithmsAll));
// Empty all JWT issuer JWKs.
logger.debug("JWKs 1 backed up, algs=[{}]", String.join(",", jwtIssuerAndRealm.issuer().algorithmsAll));
jwtIssuerAndRealm.issuer().setJwks(Collections.emptyList(), jwtIssuerJwks1OidcSafe);
printJwtIssuer(jwtIssuerAndRealm.issuer());
copyIssuerJwksToRealmConfig(jwtIssuerAndRealm);
logger.debug("JWKs 1 emptied, algs=[{}]", String.join(",", jwtIssuerAndRealm.issuer().algorithmsAll));
// Original JWT continues working, because JWT realm cached old JWKs in memory.
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
logger.debug("JWT 1 still worked, because JWT realm has old JWKs cached in memory");
// Restore original JWKs 1 into the JWT issuer.
jwtIssuerAndRealm.issuer().setJwks(jwtIssuerJwks1Backup, jwtIssuerJwks1OidcSafe);
printJwtIssuer(jwtIssuerAndRealm.issuer());
copyIssuerJwksToRealmConfig(jwtIssuerAndRealm);
logger.debug("JWKs 1 restored, algs=[{}]", String.join(",", jwtIssuerAndRealm.issuer().algorithmsAll));
// Original JWT continues working, because JWT realm cached old JWKs in memory.
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
logger.debug("JWT 1 still worked, because JWT realm has old JWKs cached in memory");
// Generate a replacement set of JWKs 2 for the JWT issuer.
final List<JwtIssuer.AlgJwkPair> jwtIssuerJwks2Backup = JwtRealmTestCase.randomJwks(
jwtIssuerJwks1Backup.stream().map(e -> e.alg()).toList(),
jwtIssuerJwks1OidcSafe
);
jwtIssuerAndRealm.issuer().setJwks(jwtIssuerJwks2Backup, jwtIssuerJwks1OidcSafe);
printJwtIssuer(jwtIssuerAndRealm.issuer());
copyIssuerJwksToRealmConfig(jwtIssuerAndRealm);
logger.debug("JWKs 2 created, algs=[{}]", String.join(",", jwtIssuerAndRealm.issuer().algorithmsAll));
// Original JWT continues working, because JWT realm still has original JWKs cached in memory.
// - jwtJwks1(PKC): Pass (Original PKC JWKs are still in the realm)
// - jwtJwks1(HMAC): Pass (Original HMAC JWKs are still in the realm)
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
logger.debug("JWT 1 still worked, because JWT realm has old JWKs cached in memory");
// Create a JWT using the new JWKs.
final SecureString jwtJwks2 = randomJwt(jwtIssuerAndRealm, user);
final String jwtJwks2Alg = SignedJWT.parse(jwtJwks2.toString()).getHeader().getAlgorithm().getName();
final boolean isPkcJwtJwks2 = JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS_PKC.contains(jwtJwks2Alg);
logger.debug("Created JWT 2: oidcSafe=[{}], algs=[{}, {}]", jwtIssuerJwks1OidcSafe, jwt1JwksAlg, jwtJwks2Alg);
// Try new JWT.
// - jwtJwks2(PKC): PKC reload triggered and loaded new JWKs, so PASS
// - jwtJwks2(HMAC): HMAC reload triggered but it is a no-op, so FAIL
if (isPkcJwtJwks2) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks2, clientSecret, jwtAuthcCount);
logger.debug("PKC JWT 2 worked with JWKs 2");
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks2, clientSecret);
logger.debug("HMAC JWT 2 failed with JWKs 1");
}
// Try old JWT.
// - jwtJwks2(PKC): PKC reload triggered and loaded new JWKs, jwtJwks1(PKC): PKC reload triggered and loaded new JWKs, so FAIL
// - jwtJwks2(PKC): PKC reload triggered and loaded new JWKs, jwtJwks1(HMAC): HMAC reload not triggered, so PASS
// - jwtJwks2(HMAC): HMAC reload triggered but it is a no-op, jwtJwks1(PKC): PKC reload not triggered, so PASS
// - jwtJwks2(HMAC): HMAC reload triggered but it is a no-op, jwtJwks1(HMAC): HMAC reload not triggered, so PASS
if (isPkcJwtJwks1 == false || isPkcJwtJwks2 == false) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks1, clientSecret);
}
// Empty all JWT issuer JWKs.
jwtIssuerAndRealm.issuer().setJwks(Collections.emptyList(), jwtIssuerJwks1OidcSafe);
printJwtIssuer(jwtIssuerAndRealm.issuer());
copyIssuerJwksToRealmConfig(jwtIssuerAndRealm);
// New JWT continues working because JWT realm will end up with PKC JWKs 2 and HMAC JWKs 1 in memory
if (isPkcJwtJwks2) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks2, clientSecret, jwtAuthcCount);
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks2, clientSecret);
}
// Trigger JWT realm to reload JWKs and go into a degraded state
// - jwtJwks1(HMAC): HMAC reload not triggered, so PASS
// - jwtJwks1(PKC): PKC reload triggered and loaded new JWKs, so FAIL
if (isPkcJwtJwks1 == false || isPkcJwtJwks2 == false) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks1, clientSecret);
}
// Try new JWT and verify degraded state caused by empty PKC JWKs
// - jwtJwks1(PKC) + jwtJwks2(PKC): If second JWT is PKC, and first JWT is PKC, degraded state can be tested.
// - jwtJwks1(HMAC) + jwtJwks2(PKC): If second JWT is PKC, but first JWT is HMAC, HMAC JWT 1 above didn't trigger PKC reload.
// - jwtJwks1(PKC) + jwtJwks2(HMAC): If second JWT is HMAC, it always fails because HMAC reload not supported.
// - jwtJwks1(HMAC) + jwtJwks2(HMAC): If second JWT is HMAC, it always fails because HMAC reload not supported.
if (isPkcJwtJwks1 == false && isPkcJwtJwks2) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks2, clientSecret, jwtAuthcCount);
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks2, clientSecret);
}
// Restore JWKs 2 to the realm
jwtIssuerAndRealm.issuer().setJwks(jwtIssuerJwks2Backup, jwtIssuerJwks1OidcSafe);
copyIssuerJwksToRealmConfig(jwtIssuerAndRealm);
printJwtIssuer(jwtIssuerAndRealm.issuer());
// Trigger JWT realm to reload JWKs and go into a recovered state
// - jwtJwks2(PKC): Pass (Triggers PKC reload, gets newer PKC JWKs), jwtJwks1(PKC): Fail (Triggers PKC reload, gets new PKC JWKs)
// - jwtJwks2(PKC): Pass (Triggers PKC reload, gets newer PKC JWKs), jwtJwks1(HMAC): Pass (HMAC reload was a no-op)
// - jwtJwks2(HMAC): Fail (Triggers HMAC reload, but it is a no-op), jwtJwks1(PKC): Fail (Triggers PKC reload, gets new PKC JWKs)
// - jwtJwks2(HMAC): Fail (Triggers HMAC reload, but it is a no-op), jwtJwks1(HMAC): Pass (HMAC reload was a no-op)
if (isPkcJwtJwks2) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks2, clientSecret, jwtAuthcCount);
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks2, clientSecret);
}
if (isPkcJwtJwks1 == false || isPkcJwtJwks2 == false) {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks1, clientSecret, jwtAuthcCount);
} else {
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtJwks1, clientSecret);
}
}
public void testJwkUpdatesByReloadWithFile() throws Exception {
doTestJwkUpdatesByReload(false);
}
public void testJwkUpdatesByReloadWithWebServer() throws Exception {
doTestJwkUpdatesByReload(true);
}
private void doTestJwkUpdatesByReload(boolean httpsServer) throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
1, // realmsRange
1, // authzRange
1, // algsRange
1, // audiencesRange
1, // usersRange
1, // rolesRange
0, // jwtCacheSizeRange (disabled cache)
httpsServer, // createHttpsServer,
true
);
JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
User user = randomUser(jwtIssuerAndRealm.issuer());
SecureString jwtJwks = randomJwt(jwtIssuerAndRealm, user);
SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwtJwks, clientSecret, 1);
List<JwtIssuer.AlgJwkPair> jwtIssuerJwks = jwtIssuerAndRealm.issuer().algAndJwksAll;
// clear all PKC JWK sets
configureJwkSets(jwtIssuerAndRealm, Collections.emptyList());
awaitAuthenticationFailure(jwtIssuerAndRealm, jwtJwks, clientSecret);
// restore PKC JWK sets
configureJwkSets(jwtIssuerAndRealm, jwtIssuerJwks);
awaitAuthenticationSuccess(jwtIssuerAndRealm, user, jwtJwks, clientSecret);
}
private void configureJwkSets(JwtIssuerAndRealm jwtIssuerAndRealm, List<JwtIssuer.AlgJwkPair> algAndJwks) throws Exception {
jwtIssuerAndRealm.issuer().setJwks(algAndJwks, false);
printJwtIssuer(jwtIssuerAndRealm.issuer());
copyIssuerJwksToRealmConfig(jwtIssuerAndRealm);
}
private void awaitAuthenticationFailure(JwtIssuerAndRealm jwtIssAndRealm, SecureString jwtJwks, SecureString clientSecret) {
assertTrue(waitUntil(() -> {
try {
verifyAuthenticateFailureHelper(jwtIssAndRealm, jwtJwks, clientSecret);
return true;
} catch (Exception | AssertionError e) {
return false;
}
}, 5, TimeUnit.SECONDS));
}
private void awaitAuthenticationSuccess(JwtIssuerAndRealm jwtIssAndRealm, User user, SecureString jwtJwks, SecureString clientSecret) {
assertTrue(waitUntil(() -> {
try {
doMultipleAuthcAuthzAndVerifySuccess(jwtIssAndRealm.realm(), user, jwtJwks, clientSecret, 1);
return true;
} catch (AssertionError e) {
return false;
}
}, 5, TimeUnit.SECONDS));
}
/**
* Test with authz realms.
* @throws Exception Unexpected test failure
*/
public void testJwtAuthcRealmAuthcAuthzWithAuthzRealms() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
randomIntBetween(1, 3), // realmsRange
randomIntBetween(1, 3), // authzRange
randomIntBetween(1, JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.size()), // algsRange
randomIntBetween(1, 3), // audiencesRange
randomIntBetween(1, 3), // usersRange
randomIntBetween(0, 3), // rolesRange
randomIntBetween(0, 1), // jwtCacheSizeRange
randomBoolean(), // createHttpsServer
false // jwkSetReloadEnabled
);
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
assertThat(jwtIssuerAndRealm.realm().delegatedAuthorizationSupport.hasDelegation(), is(true));
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwt = randomJwt(jwtIssuerAndRealm, user);
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
final int jwtAuthcCount = randomIntBetween(2, 3);
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwt, clientSecret, jwtAuthcCount);
// After the above success path test, do a negative path test for an authc user that does not exist in any authz realm.
// In other words, above the `user` was found in an authz realm, but below `otherUser` will not be found in any authz realm.
{
final String otherUsername = randomValueOtherThanMany(
candidate -> jwtIssuerAndRealm.issuer().principals.containsKey(candidate),
() -> randomAlphaOfLengthBetween(4, 12)
);
final User otherUser = new User(otherUsername);
final SecureString otherJwt = randomJwt(jwtIssuerAndRealm, otherUser);
final AuthenticationToken otherToken = jwtIssuerAndRealm.realm().token(createThreadContext(otherJwt, clientSecret));
final PlainActionFuture<AuthenticationResult<User>> otherFuture = new PlainActionFuture<>();
jwtIssuerAndRealm.realm().authenticate(otherToken, otherFuture);
final AuthenticationResult<User> otherResult = otherFuture.actionGet();
assertThat(otherResult.isAuthenticated(), is(false));
assertThat(otherResult.getException(), nullValue());
assertThat(
otherResult.getMessage(),
containsString("[" + otherUsername + "] was authenticated, but no user could be found in realms [")
);
}
}
/**
* Verify that a JWT realm successfully connects to HTTPS server, and can handle an HTTP 404 Not Found response correctly.
* @throws Exception Unexpected test failure
*/
public void testPkcJwkSetUrlNotFound() throws Exception {
final List<Realm> allRealms = new ArrayList<>(); // authc and authz realms
final boolean createHttpsServer = true; // force issuer to create HTTPS server for its PKC JWKSet
final JwtIssuer jwtIssuer = createJwtIssuer(0, 12, 1, 1, 1, createHttpsServer, false);
assertThat(jwtIssuer.httpsServer, notNullValue());
try {
final JwtRealmSettingsBuilder jwtRealmSettingsBuilder = createJwtRealmSettingsBuilder(jwtIssuer, 0, 0, false);
final String configKey = RealmSettings.getFullSettingKey(jwtRealmSettingsBuilder.name(), JwtRealmSettings.PKC_JWKSET_PATH);
final String configValue = jwtIssuer.httpsServer.url.replace("/valid/", "/invalid"); // right host, wrong path
jwtRealmSettingsBuilder.settingsBuilder().put(configKey, configValue);
final Exception exception = expectThrows(
SettingsException.class,
() -> createJwtRealm(allRealms, jwtIssuer, jwtRealmSettingsBuilder)
);
assertThat(exception.getMessage(), equalTo("Can't get contents for setting [" + configKey + "] value [" + configValue + "]."));
assertThat(exception.getCause().getMessage(), equalTo("Get [" + configValue + "] failed, status [404], reason [Not Found]."));
} finally {
jwtIssuer.close();
}
}
/**
* Test token parse failures and authentication failures.
* @throws Exception Unexpected test failure
*/
public void testJwtValidationFailures() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
randomIntBetween(1, 1), // realmsRange
randomIntBetween(0, 0), // authzRange
randomIntBetween(1, JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.size()), // algsRange
randomIntBetween(1, 1), // audiencesRange
randomIntBetween(1, 1), // usersRange
randomIntBetween(1, 1), // rolesRange
randomIntBetween(0, 1), // jwtCacheSizeRange
randomBoolean(), // createHttpsServer
false // jwkSetReloadEnabled
);
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwt = randomJwt(jwtIssuerAndRealm, user);
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
final int jwtAuthcCount = randomIntBetween(2, 3);
// Indirectly verify authentication works before performing any failure scenarios
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwt, clientSecret, jwtAuthcCount);
// The above confirmed JWT realm authc/authz is working.
// Now perform negative path tests to confirm JWT validation rejects invalid JWTs for different scenarios.
{ // Do one more direct SUCCESS scenario by checking token() and authenticate() directly before moving on to FAILURE scenarios.
final ThreadContext requestThreadContext = createThreadContext(jwt, clientSecret);
final JwtAuthenticationToken token = (JwtAuthenticationToken) jwtIssuerAndRealm.realm().token(requestThreadContext);
final PlainActionFuture<AuthenticationResult<User>> plainActionFuture = new PlainActionFuture<>();
jwtIssuerAndRealm.realm().authenticate(token, plainActionFuture);
assertThat(plainActionFuture.get(), notNullValue());
assertThat(plainActionFuture.get().isAuthenticated(), is(true));
}
// Directly verify FAILURE scenarios for token() parsing failures and authenticate() validation failures.
// Null JWT
final ThreadContext tc1 = createThreadContext(null, clientSecret);
assertThat(jwtIssuerAndRealm.realm().token(tc1), nullValue());
// Empty JWT string
final ThreadContext tc2 = createThreadContext("", clientSecret);
assertThat(jwtIssuerAndRealm.realm().token(tc2), nullValue());
// Non-empty whitespace JWT string
final ThreadContext tc3 = createThreadContext(" ", clientSecret);
assertThat(jwtIssuerAndRealm.realm().token(tc3), nullValue());
// Blank client secret
final ThreadContext tc4 = createThreadContext(jwt, "");
final Exception e4 = expectThrows(IllegalArgumentException.class, () -> jwtIssuerAndRealm.realm().token(tc4));
assertThat(e4.getMessage(), equalTo("Client shared secret must be non-empty"));
// Non-empty whitespace JWT client secret
final ThreadContext tc5 = createThreadContext(jwt, " ");
final Exception e5 = expectThrows(IllegalArgumentException.class, () -> jwtIssuerAndRealm.realm().token(tc5));
assertThat(e5.getMessage(), equalTo("Client shared secret must be non-empty"));
// JWT parse exception
final ThreadContext tc6 = createThreadContext("Head.Body.Sig", clientSecret);
assertThat(jwtIssuerAndRealm.realm().token(tc6), nullValue());
// Parse JWT into three parts, for rejecting testing of tampered JWT contents
final SignedJWT parsedJwt = SignedJWT.parse(jwt.toString());
final JWSHeader validHeader = parsedJwt.getHeader();
final JWTClaimsSet validClaimsSet = parsedJwt.getJWTClaimsSet();
final Base64URL validSignature = parsedJwt.getSignature();
{ // Verify rejection of unsigned JWT
final SecureString unsignedJwt = new SecureString(new PlainJWT(validClaimsSet).serialize().toCharArray());
final ThreadContext tc = createThreadContext(unsignedJwt, clientSecret);
assertThat(jwtIssuerAndRealm.realm().token(tc), nullValue());
}
{ // Verify rejection of a tampered header (flip HMAC=>RSA or RSA/EC=>HMAC)
final String mixupAlg; // Check if there are any algorithms available in the realm for attempting a flip test
if (JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS_HMAC.contains(validHeader.getAlgorithm().getName())) {
if (JwtRealmInspector.getJwksAlgsPkc(jwtIssuerAndRealm.realm()).algs().isEmpty()) {
mixupAlg = null; // cannot flip HMAC to PKC (no PKC algs available)
} else {
mixupAlg = randomFrom(JwtRealmInspector.getJwksAlgsPkc(jwtIssuerAndRealm.realm()).algs()); // flip HMAC to PKC
}
} else {
if (JwtRealmInspector.getJwksAlgsHmac(jwtIssuerAndRealm.realm()).algs().isEmpty()) {
mixupAlg = null; // cannot flip PKC to HMAC (no HMAC algs available)
} else {
mixupAlg = randomFrom(JwtRealmInspector.getJwksAlgsHmac(jwtIssuerAndRealm.realm()).algs()); // flip HMAC to PKC
}
}
// This check can only be executed if there is a flip algorithm available in the realm
if (Strings.hasText(mixupAlg)) {
final JWSHeader tamperedHeader = new JWSHeader.Builder(JWSAlgorithm.parse(mixupAlg)).build();
final SecureString jwtTamperedHeader = buildJwt(tamperedHeader, validClaimsSet, validSignature);
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtTamperedHeader, clientSecret);
}
}
{ // Verify rejection of a tampered claim set
final JWTClaimsSet tamperedClaimsSet = new JWTClaimsSet.Builder(validClaimsSet).claim("gr0up", "superuser").build();
final SecureString jwtTamperedClaimsSet = buildJwt(validHeader, tamperedClaimsSet, validSignature);
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtTamperedClaimsSet, clientSecret);
}
{ // Verify rejection of a tampered signature
final SecureString jwtWithTruncatedSignature = new SecureString(jwt.toString().substring(0, jwt.length() - 1).toCharArray());
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtWithTruncatedSignature, clientSecret);
}
// Get read to re-sign JWTs for time claim failure tests
final JwtIssuer.AlgJwkPair algJwkPair = randomFrom(jwtIssuerAndRealm.issuer().algAndJwksAll);
final JWSHeader jwtHeader = new JWSHeader.Builder(JWSAlgorithm.parse(algJwkPair.alg())).build();
final Instant now = Instant.now();
final Date past = Date.from(now.minusSeconds(86400));
final Date future = Date.from(now.plusSeconds(86400));
{ // Verify rejection of JWT auth_time > now
final JWTClaimsSet claimsSet = new JWTClaimsSet.Builder(validClaimsSet).claim("auth_time", future).build();
final SecureString jwtIatFuture = signJwt(algJwkPair.jwk(), new SignedJWT(jwtHeader, claimsSet));
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtIatFuture, clientSecret);
}
{ // Verify rejection of JWT iat > now
final JWTClaimsSet claimsSet = new JWTClaimsSet.Builder(validClaimsSet).issueTime(future).build();
final SecureString jwtIatFuture = signJwt(algJwkPair.jwk(), new SignedJWT(jwtHeader, claimsSet));
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtIatFuture, clientSecret);
}
{ // Verify rejection of JWT nbf > now
final JWTClaimsSet claimsSet = new JWTClaimsSet.Builder(validClaimsSet).notBeforeTime(future).build();
final SecureString jwtIatFuture = signJwt(algJwkPair.jwk(), new SignedJWT(jwtHeader, claimsSet));
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtIatFuture, clientSecret);
}
{ // Verify rejection of JWT now > exp
final JWTClaimsSet claimsSet = new JWTClaimsSet.Builder(validClaimsSet).expirationTime(past).build();
final SecureString jwtExpPast = signJwt(algJwkPair.jwk(), new SignedJWT(jwtHeader, claimsSet));
verifyAuthenticateFailureHelper(jwtIssuerAndRealm, jwtExpPast, clientSecret);
}
}
/**
* Configure two realms for same issuer. Use identical realm config, except different client secrets.
* Generate a JWT which is valid for both realms, but verify authentication only succeeds for second realm with correct client secret.
* @throws Exception Unexpected test failure
*/
public void testSameIssuerTwoRealmsDifferentClientSecrets() throws Exception {
final int realmsCount = 2;
final List<Realm> allRealms = new ArrayList<>(realmsCount); // two identical realms for same issuer, except different client secret
final JwtIssuer jwtIssuer = createJwtIssuer(0, 12, 1, 1, 1, false, false);
printJwtIssuer(jwtIssuer);
jwtIssuerAndRealms = new ArrayList<>(realmsCount);
for (int i = 0; i < realmsCount; i++) {
final String realmName = "realm_" + jwtIssuer.issuerClaimValue + "_" + i;
final String clientSecret = "clientSecret_" + jwtIssuer.issuerClaimValue + "_" + i;
final Settings.Builder authcSettings = Settings.builder()
.put(globalSettings)
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_ISSUER), jwtIssuer.issuerClaimValue)
.put(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SIGNATURE_ALGORITHMS),
String.join(",", jwtIssuer.algorithmsAll)
)
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_AUDIENCES), jwtIssuer.audiencesClaimValue.get(0))
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.CLAIMS_PRINCIPAL.getClaim()), jwtIssuer.principalClaimName)
.put(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.CLIENT_AUTHENTICATION_TYPE),
JwtRealmSettings.ClientAuthenticationType.SHARED_SECRET.value()
);
if (jwtIssuer.encodedJwkSetPkcPublic.isEmpty() == false) {
authcSettings.put(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.PKC_JWKSET_PATH),
saveToTempFile("jwkset.", ".json", jwtIssuer.encodedJwkSetPkcPublic)
);
}
// JWT authc realm secure settings
final MockSecureSettings secureSettings = new MockSecureSettings();
if (jwtIssuer.algAndJwksHmac.isEmpty() == false) {
secureSettings.setString(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HMAC_JWKSET),
jwtIssuer.encodedJwkSetHmac
);
}
if (jwtIssuer.encodedKeyHmacOidc != null) {
secureSettings.setString(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HMAC_KEY),
jwtIssuer.encodedKeyHmacOidc
);
}
secureSettings.setString(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.CLIENT_AUTHENTICATION_SHARED_SECRET),
clientSecret
);
authcSettings.setSecureSettings(secureSettings);
final JwtRealmSettingsBuilder jwtRealmSettingsBuilder = new JwtRealmSettingsBuilder(realmName, authcSettings);
final JwtRealm jwtRealm = createJwtRealm(allRealms, jwtIssuer, jwtRealmSettingsBuilder);
jwtRealm.initialize(allRealms, licenseState);
final JwtIssuerAndRealm jwtIssuerAndRealm = new JwtIssuerAndRealm(jwtIssuer, jwtRealm, jwtRealmSettingsBuilder);
jwtIssuerAndRealms.add(jwtIssuerAndRealm); // add them so the test will clean them up
printJwtRealm(jwtRealm);
}
// pick 2nd realm and use its secret, verify 2nd realm does authc, which implies 1st realm rejects the secret
final JwtIssuerAndRealm jwtIssuerAndRealm = jwtIssuerAndRealms.get(1);
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwt = randomJwt(jwtIssuerAndRealm, user);
final SecureString clientSecret = JwtRealmInspector.getClientAuthenticationSharedSecret(jwtIssuerAndRealm.realm());
final int jwtAuthcCount = randomIntBetween(2, 3);
doMultipleAuthcAuthzAndVerifySuccess(jwtIssuerAndRealm.realm(), user, jwt, clientSecret, jwtAuthcCount);
}
public void testConcurrentPutAndInvalidateCacheWorks() throws Exception {
jwtIssuerAndRealms = generateJwtIssuerRealmPairs(
randomIntBetween(1, 1), // realmsRange
randomIntBetween(0, 0), // authzRange
randomIntBetween(1, JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.size()), // algsRange
randomIntBetween(1, 1), // audiencesRange
randomIntBetween(1, 1), // usersRange
randomIntBetween(1, 1), // rolesRange
randomIntBetween(1, 1), // jwtCacheSizeRange set to 1 for constant eviction that is necessary to trigger the locking when put
false, // createHttpsServer
false // jwkSetReloadEnabled
);
final JwtIssuerAndRealm jwtIssuerAndRealm = randomJwtIssuerRealmPair();
final User user = randomUser(jwtIssuerAndRealm.issuer());
final SecureString jwt = randomJwt(jwtIssuerAndRealm, user);
final SignedJWT parsedJwt = SignedJWT.parse(jwt.toString());
final JWTClaimsSet validClaimsSet = parsedJwt.getJWTClaimsSet();
final int processors = Runtime.getRuntime().availableProcessors();
final int numberOfThreads = Math.min(50, scaledRandomIntBetween((processors + 1) / 2, 4 * processors)); // up to 50 threads
final Thread[] threads = new Thread[numberOfThreads];
final CountDownLatch threadsCountDown = new CountDownLatch(numberOfThreads);
final CountDownLatch racingCountDown = new CountDownLatch(1);
final CountDownLatch completionCountDown = new CountDownLatch(numberOfThreads);
for (int i = 0; i < numberOfThreads; i++) {
if (randomBoolean()) {
threads[i] = new Thread(() -> {
threadsCountDown.countDown();
try {
if (racingCountDown.await(10, TimeUnit.SECONDS)) {
jwtIssuerAndRealm.realm().expireAll();
completionCountDown.countDown();
} else {
throw new AssertionError("racing is not ready within the given time period");
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
} else {
threads[i] = new Thread(() -> {
final BytesArray jwtCacheKey = new BytesArray(randomAlphaOfLength(10));
final PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
threadsCountDown.countDown();
try {
if (racingCountDown.await(10, TimeUnit.SECONDS)) {
for (int j = 0; j < 10; j++) {
jwtIssuerAndRealm.realm().processValidatedJwt("token-principal", jwtCacheKey, validClaimsSet, future);
assertThat(future.actionGet().getValue().principal(), equalTo(user.principal()));
}
completionCountDown.countDown();
} else {
throw new AssertionError("Racing is not ready within the given time period");
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
}
threads[i].start();
}
if (threadsCountDown.await(10, TimeUnit.SECONDS)) {
racingCountDown.countDown();
} else {
throw new AssertionError("Threads are not ready within the given time period");
}
if (false == completionCountDown.await(30, TimeUnit.SECONDS)) {
throw new AssertionError("Test is not completed in time, check whether threads had deadlock");
}
for (Thread thread : threads) {
thread.join();
}
}
}
| JwtRealmAuthenticateTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/RouteStartupFailShouldStopAlsoIssueTest.java | {
"start": 3264,
"end": 3745
} | class ____ extends DefaultEndpoint {
public MyEndpoint(String endpointUri, Component component) {
super(endpointUri, component);
}
@Override
public Producer createProducer() {
throw new UnsupportedOperationException("Not supported");
}
@Override
public Consumer createConsumer(Processor processor) {
return new MyFailConsumer(this, processor);
}
}
private static | MyEndpoint |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterNamingTest.java | {
"start": 7590,
"end": 7852
} | class ____<RESP> {
public <TBaz, Foo> void method(Foo f) {
TBaz bad = null;
Foo d = f;
}
}
""")
.addOutputLines(
"out/Test.java",
"""
| Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 34266,
"end": 34560
} | class ____ {
public void doTest() {
Client client = new Client();
int x = client.multiply(5 + 3, 10);
}
}
""")
.addOutputLines(
"out/Caller.java",
"""
public final | Caller |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java | {
"start": 85215,
"end": 85938
} | class ____ extends Realm {
DummyRealm(RealmConfig config) {
super(config);
}
@Override
public boolean supports(AuthenticationToken token) {
return false;
}
@Override
public AuthenticationToken token(ThreadContext threadContext) {
return null;
}
@Override
public void authenticate(AuthenticationToken token, ActionListener<AuthenticationResult<User>> listener) {
listener.onResponse(AuthenticationResult.notHandled());
}
@Override
public void lookupUser(String username, ActionListener<User> listener) {
listener.onResponse(null);
}
}
}
| DummyRealm |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/io/DelimitedInputFormatSamplingTest.java | {
"start": 12147,
"end": 12576
} | class ____ extends DelimitedInputFormat<IntValue> {
private static final long serialVersionUID = 1L;
TestDelimitedInputFormat(Configuration configuration) {
super(null, configuration);
}
@Override
public IntValue readRecord(IntValue reuse, byte[] bytes, int offset, int numBytes) {
throw new UnsupportedOperationException();
}
}
}
| TestDelimitedInputFormat |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/ArchiveRootBuildItem.java | {
"start": 715,
"end": 880
} | class ____ the paths of directories or archives to be used as archive roots,
* as well as paths that should be excluded from indexing.
* </p>
*/
public final | contains |
java | google__guava | android/guava-tests/test/com/google/common/eventbus/outside/NeitherAbstractNorAnnotatedInSuperclassTest.java | {
"start": 1004,
"end": 1652
} | class ____ {
final List<Object> neitherOverriddenNorAnnotatedEvents = new ArrayList<>();
final List<Object> overriddenInSubclassNowhereAnnotatedEvents = new ArrayList<>();
final List<Object> overriddenAndAnnotatedInSubclassEvents = new ArrayList<>();
public void neitherOverriddenNorAnnotated(Object o) {
neitherOverriddenNorAnnotatedEvents.add(o);
}
public void overriddenInSubclassNowhereAnnotated(Object o) {
overriddenInSubclassNowhereAnnotatedEvents.add(o);
}
public void overriddenAndAnnotatedInSubclass(Object o) {
overriddenAndAnnotatedInSubclassEvents.add(o);
}
}
static | SuperClass |
java | junit-team__junit5 | junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/execution/DefaultExecutableInvoker.java | {
"start": 1046,
"end": 2022
} | class ____ implements ExecutableInvoker {
private final ExtensionContext extensionContext;
private final ExtensionRegistry extensionRegistry;
public DefaultExecutableInvoker(ExtensionContext extensionContext, ExtensionRegistry extensionRegistry) {
this.extensionContext = extensionContext;
this.extensionRegistry = extensionRegistry;
}
@Override
public <T> T invoke(Constructor<T> constructor, @Nullable Object outerInstance) {
@Nullable
Object[] arguments = resolveParameters(constructor, Optional.empty(), Optional.ofNullable(outerInstance),
extensionContext, extensionRegistry);
return ReflectionUtils.newInstance(constructor, arguments);
}
@Override
public @Nullable Object invoke(Method method, @Nullable Object target) {
@Nullable
Object[] arguments = resolveParameters(method, Optional.ofNullable(target), extensionContext,
extensionRegistry);
return MethodReflectionUtils.invoke(method, target, arguments);
}
}
| DefaultExecutableInvoker |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3747/Issue3747Mapper.java | {
"start": 385,
"end": 450
} | interface ____ {
Target map(Source source);
| Issue3747Mapper |
java | google__guava | guava/src/com/google/common/cache/LocalCache.java | {
"start": 140019,
"end": 140753
} | class ____<T> extends AbstractSet<T> {
@Override
public int size() {
return LocalCache.this.size();
}
@Override
public boolean isEmpty() {
return LocalCache.this.isEmpty();
}
@Override
public void clear() {
LocalCache.this.clear();
}
}
boolean removeIf(BiPredicate<? super K, ? super V> filter) {
checkNotNull(filter);
boolean changed = false;
for (K key : keySet()) {
while (true) {
V value = get(key);
if (value == null || !filter.test(key, value)) {
break;
} else if (LocalCache.this.remove(key, value)) {
changed = true;
break;
}
}
}
return changed;
}
final | AbstractCacheSet |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/AsyncWaitStrategyFactory.java | {
"start": 902,
"end": 1052
} | interface ____ users to configure a custom Disruptor WaitStrategy used for
* Async Loggers and Async LoggerConfigs.
*
* @since 2.17.3
*/
public | allows |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/main/java/org/hibernate/processor/ContainsAttributeTypeVisitor.java | {
"start": 768,
"end": 2195
} | class ____ extends SimpleTypeVisitor8<Boolean, Element> {
private final Context context;
private final TypeElement type;
ContainsAttributeTypeVisitor(TypeElement elem, Context context) {
this.context = context;
this.type = elem;
}
@Override
public Boolean visitDeclared(DeclaredType declaredType, Element element) {
TypeElement returnedElement = (TypeElement) context.getTypeUtils().asElement(declaredType);
final String returnTypeName = NullnessUtil.castNonNull( returnedElement ).getQualifiedName().toString();
final String collection = COLLECTIONS.get(returnTypeName);
if (collection != null) {
final TypeMirror collectionElementType =
getCollectionElementType( declaredType, returnTypeName, null, context );
final Element collectionElement = context.getTypeUtils().asElement(collectionElementType);
if ( ElementKind.TYPE_PARAMETER == NullnessUtil.castNonNull( collectionElement ).getKind() ) {
return false;
}
returnedElement = (TypeElement) collectionElement;
}
return type.getQualifiedName().contentEquals( returnedElement.getQualifiedName() );
}
@Override
public Boolean visitExecutable(ExecutableType executable, Element element) {
return element.getKind() == ElementKind.METHOD
&& isProperty( element.getSimpleName().toString(), toTypeString( executable.getReturnType() ) )
&& executable.getReturnType().accept(this, element);
}
}
| ContainsAttributeTypeVisitor |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/JDKAtomicTypesDeserTest.java | {
"start": 3876,
"end": 4224
} | class ____ {
protected AtomicReference<String> _atomic;
@JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
public AtomicRefBeanWithEmpty(@JsonProperty("atomic")
@JsonSetter(nulls = Nulls.AS_EMPTY)
AtomicReference<String> ref) {
_atomic = ref;
}
}
static | AtomicRefBeanWithEmpty |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/HttpClientUpgradeHandler.java | {
"start": 2326,
"end": 2818
} | interface ____ {
/**
* Removes or disables the encoder of this codec so that the {@link UpgradeCodec} can send an initial greeting
* (if any).
*/
void prepareUpgradeFrom(ChannelHandlerContext ctx);
/**
* Removes this codec (i.e. all associated handlers) from the pipeline.
*/
void upgradeFrom(ChannelHandlerContext ctx);
}
/**
* A codec that the source can be upgraded to.
*/
public | SourceCodec |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/request/MockMvcRequestBuilders.java | {
"start": 1166,
"end": 1376
} | class ____ reuse a
* {@link org.springframework.mock.web.MockServletContext MockServletContext}
* that was created by the Spring TestContext Framework.
*
* <h3>Eclipse Users</h3>
* <p>Consider adding this | will |
java | apache__kafka | connect/mirror/src/main/java/org/apache/kafka/connect/mirror/formatters/OffsetSyncFormatter.java | {
"start": 1046,
"end": 1272
} | class ____ implements MessageFormatter {
@Override
public void writeTo(ConsumerRecord<byte[], byte[]> record, PrintStream output) {
output.println(OffsetSync.deserializeRecord(record));
}
}
| OffsetSyncFormatter |
java | google__dagger | javatests/dagger/internal/codegen/BindsDependsOnSubcomponentValidationTest.java | {
"start": 2191,
"end": 2559
} | interface ____ {",
" @Binds Foo bindFoo(FooImpl impl);",
"}");
Source childComponent =
CompilerTests.javaSource(
"test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent(modules = ChildModule.class)",
" | ParentModule |
java | apache__camel | tooling/camel-tooling-model/src/main/java/org/apache/camel/tooling/model/JsonMapper.java | {
"start": 1909,
"end": 41435
} | class ____ {
private JsonMapper() {
}
public static BaseModel<?> generateModel(Path file) {
try {
String json = Files.readString(file);
return generateModel(json);
} catch (IOException e) {
throw new RuntimeException("Error reading json file: " + file, e);
}
}
public static BaseModel<?> generateModel(String json) {
JsonObject obj = deserialize(json);
return generateModel(obj);
}
public static BaseModel<?> generateModel(JsonObject obj) {
if (obj.containsKey("component")) {
return generateComponentModel(obj);
} else if (obj.containsKey("language")) {
return generateLanguageModel(obj);
} else if (obj.containsKey("dataformat")) {
return generateDataFormatModel(obj);
} else if (obj.containsKey("transformer")) {
return generateTransformerModel(obj);
} else if (obj.containsKey("console")) {
return generateDevConsoleModel(obj);
} else if (obj.containsKey("other")) {
return generateOtherModel(obj);
} else if (obj.containsKey("model")) {
return generateEipModel(obj);
} else if (obj.containsKey("bean")) {
return generatePojoBeanModel(obj);
} else {
return null;
}
}
public static ComponentModel generateComponentModel(String json) {
JsonObject obj = deserialize(json);
return generateComponentModel(obj);
}
public static ComponentModel generateComponentModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("component");
ComponentModel model = new ComponentModel();
parseComponentModel(mobj, model);
JsonObject mcprp = (JsonObject) obj.get("componentProperties");
if (mcprp != null) {
for (Map.Entry<String, Object> entry : mcprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
ComponentOptionModel option = new ComponentOptionModel();
parseOption(mp, option, entry.getKey());
model.addComponentOption(option);
}
}
JsonObject headers = (JsonObject) obj.get("headers");
if (headers != null) {
for (Map.Entry<String, Object> entry : headers.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
EndpointHeaderModel header = new EndpointHeaderModel();
parseOption(mp, header, entry.getKey());
header.setConstantName(mp.getString("constantName"));
model.addEndpointHeader(header);
}
}
JsonObject mprp = (JsonObject) obj.get("properties");
if (mprp != null) {
for (Map.Entry<String, Object> entry : mprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
EndpointOptionModel option = new EndpointOptionModel();
parseOption(mp, option, entry.getKey());
model.addEndpointOption(option);
}
}
JsonObject mprap = (JsonObject) obj.get("apis");
if (mprap != null) {
for (Map.Entry<String, Object> entry : mprap.entrySet()) {
String name = entry.getKey();
JsonObject mp = (JsonObject) entry.getValue();
ApiModel am = new ApiModel();
am.setName(name);
am.setDescription(mp.getStringOrDefault("description", ""));
am.setConsumerOnly(mp.getBooleanOrDefault("consumerOnly", false));
am.setProducerOnly(mp.getBooleanOrDefault("producerOnly", false));
model.getApiOptions().add(am);
Collection<String> aliases = mp.getCollection("aliases");
if (aliases != null && !aliases.isEmpty()) {
aliases.forEach(am::addAlias);
}
JsonObject mm = (JsonObject) mp.get("methods");
if (mm != null) {
for (Map.Entry<String, Object> mme : mm.entrySet()) {
JsonObject mmp = (JsonObject) mme.getValue();
ApiMethodModel amm = am.newMethod(mme.getKey());
Collection<String> signatures = mmp.getCollection("signatures");
if (signatures != null && !signatures.isEmpty()) {
signatures.forEach(amm::addSignature);
}
amm.setDescription(mmp.getStringOrDefault("description", ""));
}
}
}
}
mprap = (JsonObject) obj.get("apiProperties");
if (mprap != null) {
for (Map.Entry<String, Object> entry : mprap.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
String name = entry.getKey();
ApiModel am = model.getApiOptions().stream().filter(a -> a.getName().equals(name)).findFirst().orElse(null);
if (am == null) {
throw new RuntimeException("Invalid json. Cannot find ApiModel with name: " + name);
}
JsonObject mm = (JsonObject) mp.get("methods");
if (mm != null) {
for (Map.Entry<String, Object> mme : mm.entrySet()) {
JsonObject mmp = (JsonObject) mme.getValue();
String mname = mme.getKey();
ApiMethodModel amm
= am.getMethods().stream().filter(a -> a.getName().equals(mname)).findFirst().orElse(null);
if (amm == null) {
throw new RuntimeException("Invalid json. Cannot find ApiMethodModel with name: " + mname);
}
JsonObject properties = (JsonObject) mmp.get("properties");
if (properties != null) {
for (Map.Entry<String, Object> pe : properties.entrySet()) {
JsonObject prop = (JsonObject) pe.getValue();
ComponentModel.ApiOptionModel option = new ComponentModel.ApiOptionModel();
parseOption(prop, option, pe.getKey());
option.setOptional(prop.getBooleanOrDefault("optional", false));
amm.addApiOptionModel(option);
}
}
}
}
}
}
return model;
}
public static void parseComponentModel(JsonObject mobj, ComponentModel model) {
parseModel(mobj, model);
model.setScheme(mobj.getString("scheme"));
model.setExtendsScheme(mobj.getString("extendsScheme"));
model.setAlternativeSchemes(mobj.getString("alternativeSchemes"));
model.setSyntax(mobj.getString("syntax"));
model.setAlternativeSyntax(mobj.getString("alternativeSyntax"));
model.setAsync(mobj.getBooleanOrDefault("async", false));
model.setApi(mobj.getBooleanOrDefault("api", false));
model.setApiSyntax(mobj.getString("apiSyntax"));
model.setConsumerOnly(mobj.getBooleanOrDefault("consumerOnly", false));
model.setProducerOnly(mobj.getBooleanOrDefault("producerOnly", false));
model.setLenientProperties(mobj.getBooleanOrDefault("lenientProperties", false));
model.setBrowsable(mobj.getBooleanOrDefault("browsable", false));
model.setRemote(mobj.getBooleanOrDefault("remote", false));
parseArtifact(mobj, model);
}
private static void parseArtifact(JsonObject mobj, ArtifactModel<?> model) {
model.setGroupId(mobj.getString("groupId"));
model.setArtifactId(mobj.getString("artifactId"));
model.setVersion(mobj.getString("version"));
}
public static String createParameterJsonSchema(ComponentModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(ComponentModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.put("scheme", model.getScheme());
obj.put("extendsScheme", model.getExtendsScheme());
obj.put("alternativeSchemes", model.getAlternativeSchemes());
obj.put("syntax", model.getSyntax());
obj.put("alternativeSyntax", model.getAlternativeSyntax());
obj.put("async", model.isAsync());
obj.put("api", model.isApi());
if (model.isApi()) {
obj.put("apiSyntax", model.getApiSyntax());
}
obj.put("consumerOnly", model.isConsumerOnly());
obj.put("producerOnly", model.isProducerOnly());
obj.put("lenientProperties", model.isLenientProperties());
obj.put("browsable", model.isBrowsable());
obj.put("remote", model.isRemote());
obj.put("verifiers", model.getVerifiers());
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("component", obj);
wrapper.put("componentProperties", asJsonObject(model.getComponentOptions()));
final List<EndpointHeaderModel> headers = model.getEndpointHeaders();
if (!headers.isEmpty()) {
wrapper.put("headers", asJsonObject(headers));
}
wrapper.put("properties", asJsonObject(model.getEndpointOptions()));
if (!model.getApiOptions().isEmpty()) {
wrapper.put("apis", apiModelAsJsonObject(model.getApiOptions(), false));
wrapper.put("apiProperties", apiModelAsJsonObject(model.getApiOptions(), true));
}
return wrapper;
}
public static DataFormatModel generateDataFormatModel(String json) {
JsonObject obj = deserialize(json);
return generateDataFormatModel(obj);
}
public static DataFormatModel generateDataFormatModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("dataformat");
DataFormatModel model = new DataFormatModel();
parseModel(mobj, model);
parseArtifact(mobj, model);
model.setModelName(mobj.getString("modelName"));
model.setModelJavaType(mobj.getString("modelJavaType"));
JsonObject mprp = (JsonObject) obj.get("properties");
for (Map.Entry<String, Object> entry : mprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
DataFormatOptionModel option = new DataFormatOptionModel();
parseOption(mp, option, entry.getKey());
model.addOption(option);
}
return model;
}
public static String createParameterJsonSchema(DataFormatModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(DataFormatModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.put("modelName", model.getModelName());
obj.put("modelJavaType", model.getModelJavaType());
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("dataformat", obj);
wrapper.put("properties", asJsonObject(model.getOptions()));
return wrapper;
}
public static EipModel generateEipModel(String json) {
JsonObject obj = deserialize(json);
return generateEipModel(obj);
}
public static EipModel generateEipModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("model");
EipModel model = new EipModel();
parseModel(mobj, model);
model.setAbstractModel(mobj.getBooleanOrDefault("abstract", false));
model.setInput(mobj.getBooleanOrDefault("input", false));
model.setOutput(mobj.getBooleanOrDefault("output", false));
JsonObject mprp = (JsonObject) obj.get("properties");
if (mprp != null) {
for (Map.Entry<String, Object> entry : mprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
EipOptionModel option = new EipOptionModel();
parseOption(mp, option, entry.getKey());
model.addOption(option);
}
}
mprp = (JsonObject) obj.get("exchangeProperties");
if (mprp != null) {
for (Map.Entry<String, Object> entry : mprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
EipOptionModel option = new EipOptionModel();
parseOption(mp, option, entry.getKey());
model.addExchangeProperty(option);
}
}
return model;
}
public static PojoBeanModel generatePojoBeanModel(String json) {
JsonObject obj = deserialize(json);
return generatePojoBeanModel(obj);
}
public static PojoBeanModel generatePojoBeanModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("bean");
PojoBeanModel model = new PojoBeanModel();
parseModel(mobj, model);
parseArtifact(mobj, model);
JsonObject mprp = (JsonObject) mobj.get("properties");
if (mprp != null) {
for (Map.Entry<String, Object> entry : mprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
PojoBeanModel.PojoBeanOptionModel option = new PojoBeanModel.PojoBeanOptionModel();
parseOption(mp, option, entry.getKey());
model.addOption(option);
}
}
return model;
}
public static String createParameterJsonSchema(EipModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(EipModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
obj.put("abstract", model.isAbstractModel());
obj.put("input", model.isInput());
obj.put("output", model.isOutput());
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("model", obj);
wrapper.put("properties", asJsonObject(model.getOptions()));
if (!model.getExchangeProperties().isEmpty()) {
wrapper.put("exchangeProperties", asJsonObject(model.getExchangeProperties()));
}
return wrapper;
}
public static String createParameterJsonSchema(PojoBeanModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(PojoBeanModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("bean", obj);
wrapper.put("properties", asJsonObject(model.getOptions()));
return wrapper;
}
public static LanguageModel generateLanguageModel(String json) {
JsonObject obj = deserialize(json);
return generateLanguageModel(obj);
}
public static LanguageModel generateLanguageModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("language");
LanguageModel model = new LanguageModel();
parseModel(mobj, model);
model.setModelName(mobj.getString("modelName"));
model.setModelJavaType(mobj.getString("modelJavaType"));
parseArtifact(mobj, model);
JsonObject mprp = (JsonObject) obj.get("properties");
for (Map.Entry<String, Object> entry : mprp.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
LanguageOptionModel option = new LanguageOptionModel();
parseOption(mp, option, entry.getKey());
model.addOption(option);
}
JsonObject mprf = (JsonObject) obj.get("functions");
if (mprf != null) {
for (Map.Entry<String, Object> entry : mprf.entrySet()) {
JsonObject mp = (JsonObject) entry.getValue();
LanguageModel.LanguageFunctionModel func = new LanguageModel.LanguageFunctionModel();
parseFunction(mp, func, entry.getKey());
model.addFunction(func);
}
}
return model;
}
public static String createParameterJsonSchema(LanguageModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(LanguageModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.put("modelName", model.getModelName());
obj.put("modelJavaType", model.getModelJavaType());
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("language", obj);
wrapper.put("properties", asJsonObject(model.getOptions()));
final List<LanguageModel.LanguageFunctionModel> functions = model.getFunctions();
if (!functions.isEmpty()) {
wrapper.put("functions", asJsonObjectFunctions(functions));
}
return wrapper;
}
public static TransformerModel generateTransformerModel(String json) {
JsonObject obj = deserialize(json);
return generateTransformerModel(obj);
}
public static TransformerModel generateTransformerModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("transformer");
TransformerModel model = new TransformerModel();
parseModel(mobj, model);
model.setFrom(mobj.getString("from"));
model.setTo(mobj.getString("to"));
parseArtifact(mobj, model);
return model;
}
public static String createParameterJsonSchema(TransformerModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(TransformerModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.put("from", model.getFrom());
obj.put("to", model.getTo());
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("transformer", obj);
return wrapper;
}
public static DevConsoleModel generateDevConsoleModel(String json) {
JsonObject obj = deserialize(json);
return generateDevConsoleModel(obj);
}
public static DevConsoleModel generateDevConsoleModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("console");
DevConsoleModel model = new DevConsoleModel();
parseModel(mobj, model);
model.setGroup(mobj.getString("group"));
parseArtifact(mobj, model);
return model;
}
public static String createParameterJsonSchema(DevConsoleModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(DevConsoleModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.put("group", model.getGroup());
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("console", obj);
return wrapper;
}
public static OtherModel generateOtherModel(String json) {
JsonObject obj = deserialize(json);
return generateOtherModel(obj);
}
public static OtherModel generateOtherModel(JsonObject obj) {
JsonObject mobj = (JsonObject) obj.get("other");
OtherModel model = new OtherModel();
parseModel(mobj, model);
parseArtifact(mobj, model);
return model;
}
public static String createJsonSchema(OtherModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject asJsonObject(OtherModel model) {
JsonObject obj = new JsonObject();
baseToJson(model, obj);
artifactToJson(model, obj);
obj.entrySet().removeIf(e -> e.getValue() == null);
JsonObject wrapper = new JsonObject();
wrapper.put("other", obj);
return wrapper;
}
private static void baseToJson(BaseModel<?> model, JsonObject obj) {
obj.put("kind", model.getKind());
obj.put("name", model.getName());
obj.put("title", model.getTitle());
obj.put("description", model.getDescription());
obj.put("deprecated", model.isDeprecated());
obj.put("deprecatedSince", model.getDeprecatedSince());
obj.put("deprecationNote", model.getDeprecationNote());
obj.put("firstVersion", model.getFirstVersion());
obj.put("label", model.getLabel());
obj.put("javaType", model.getJavaType());
if (model.getSupportLevel() != null) {
obj.put("supportLevel", model.getSupportLevel().name());
}
if (model.isNativeSupported()) {
obj.put("nativeSupported", model.isNativeSupported());
}
if (!model.getMetadata().isEmpty()) {
obj.put("metadata", model.getMetadata());
}
}
private static void artifactToJson(ArtifactModel<?> model, JsonObject obj) {
obj.put("groupId", model.getGroupId());
obj.put("artifactId", model.getArtifactId());
obj.put("version", model.getVersion());
}
private static void parseModel(JsonObject mobj, BaseModel<?> model) {
model.setTitle(mobj.getString("title"));
model.setName(mobj.getString("name"));
model.setDescription(mobj.getString("description"));
model.setFirstVersion(mobj.getString("firstVersion"));
model.setLabel(mobj.getString("label"));
model.setDeprecated(mobj.getBooleanOrDefault("deprecated", false));
model.setDeprecatedSince(mobj.getString("deprecatedSince"));
model.setDeprecationNote(mobj.getString("deprecationNote"));
model.setJavaType(mobj.getString("javaType"));
model.setSupportLevel(SupportLevel.safeValueOf(mobj.getString("supportLevel")));
model.setNativeSupported(mobj.getBooleanOrDefault("nativeSupported", false));
model.setMetadata(mobj.getMapOrDefault("metadata", new JsonObject()));
}
private static void parseOption(JsonObject mp, BaseOptionModel option, String name) {
option.setName(name);
Integer idx = mp.getInteger("index");
if (idx != null) {
option.setIndex(idx);
}
option.setKind(mp.getString("kind"));
option.setDisplayName(mp.getString("displayName"));
option.setGroup(mp.getString("group"));
option.setLabel(mp.getString("label"));
option.setRequired(mp.getBooleanOrDefault("required", false));
option.setType(mp.getString("type"));
option.setJavaType(mp.getString("javaType"));
option.setEnums(asStringList(mp.getCollection("enum")));
option.setOneOfs(asStringList(mp.getCollection("oneOf")));
option.setPrefix(mp.getString("prefix"));
option.setOptionalPrefix(mp.getString("optionalPrefix"));
option.setMultiValue(mp.getBooleanOrDefault("multiValue", false));
option.setDeprecated(mp.getBooleanOrDefault("deprecated", false));
option.setAutowired(mp.getBooleanOrDefault("autowired", false));
option.setDeprecationNote(mp.getString("deprecationNote"));
option.setSecret(mp.getBooleanOrDefault("secret", false));
option.setDefaultValue(mp.get("defaultValue"));
option.setAsPredicate(mp.getBooleanOrDefault("asPredicate", false));
option.setConfigurationClass(mp.getString("configurationClass"));
option.setConfigurationField(mp.getString("configurationField"));
option.setDescription(mp.getString("description"));
option.setGetterMethod(mp.getString("getterMethod"));
option.setSetterMethod(mp.getString("setterMethod"));
option.setSupportFileReference(mp.getBooleanOrDefault("supportFileReference", false));
option.setLargeInput(mp.getBooleanOrDefault("largeInput", false));
option.setInputLanguage(mp.getString("inputLanguage"));
option.setImportant(mp.getBooleanOrDefault("important", false));
}
private static void parseGroup(JsonObject mp, MainGroupModel option) {
option.setName(mp.getString("name"));
option.setDescription(mp.getString("description"));
option.setSourceType(mp.getString("sourceType"));
}
private static void parseGroup(JsonObject mp, JBangGroupModel option) {
option.setName(mp.getString("name"));
option.setDescription(mp.getString("description"));
option.setSourceType(mp.getString("sourceType"));
}
private static void parseFunction(JsonObject mp, LanguageModel.LanguageFunctionModel func, String name) {
func.setName(name);
func.setConstantName(name);
Integer idx = mp.getInteger("index");
if (idx != null) {
func.setIndex(idx);
}
func.setKind(mp.getString("kind"));
func.setDisplayName(mp.getString("displayName"));
func.setGroup(mp.getString("group"));
func.setLabel(mp.getString("label"));
func.setRequired(mp.getBooleanOrDefault("required", false));
func.setJavaType(mp.getString("javaType"));
func.setPrefix(mp.getString("prefix"));
func.setDeprecated(mp.getBooleanOrDefault("deprecated", false));
func.setDeprecationNote(mp.getString("deprecationNote"));
func.setDescription(mp.getString("description"));
func.setOgnl(mp.getBoolean("ognl"));
func.setSuffix(mp.getString("suffix"));
}
public static JsonObject asJsonObject(List<? extends BaseOptionModel> options) {
JsonObject json = new JsonObject();
for (int i = 0; i < options.size(); i++) {
var o = options.get(i);
o.setIndex(i);
json.put(o.getName(), asJsonObject(o));
}
return json;
}
public static JsonObject asJsonObjectFunctions(List<LanguageModel.LanguageFunctionModel> options) {
JsonObject json = new JsonObject();
for (int i = 0; i < options.size(); i++) {
var o = options.get(i);
o.setIndex(i);
JsonObject jo = asJsonObject(o);
jo.put("ognl", o.isOgnl());
if (o.getPrefix() != null) {
jo.put("prefix", o.getPrefix());
}
if (o.getSuffix() != null) {
jo.put("suffix", o.getSuffix());
}
json.put(o.getName(), jo);
}
return json;
}
public static JsonObject apiModelAsJsonObject(Collection<ApiModel> model, boolean options) {
JsonObject root = new JsonObject();
model.forEach(a -> {
JsonObject json = new JsonObject();
root.put(a.getName(), json);
if (!options) {
// lets be less verbose and only output these details for the api summary and not when we have all options included
json.put("consumerOnly", a.isConsumerOnly());
json.put("producerOnly", a.isProducerOnly());
if (a.getDescription() != null) {
json.put("description", a.getDescription());
}
if (!a.getAliases().isEmpty()) {
json.put("aliases", new JsonArray(a.getAliases()));
}
}
Map<String, JsonObject> methods = new TreeMap<>();
json.put("methods", methods);
a.getMethods().forEach(m -> {
JsonObject mJson = new JsonObject();
if (!options) {
// lets be less verbose and only output these details for the api summary and not when we have all options included
if (m.getDescription() != null) {
mJson.put("description", m.getDescription());
}
if (!m.getSignatures().isEmpty()) {
mJson.put("signatures", new JsonArray(m.getSignatures()));
}
}
if (options) {
mJson.put("properties", asJsonObject(m.getOptions()));
}
methods.put(m.getName(), mJson);
});
});
return root;
}
public static JsonObject asJsonObject(BaseOptionModel option) {
JsonObject prop = new JsonObject();
prop.put("index", option.getIndex());
prop.put("kind", option.getKind());
prop.put("displayName", option.getDisplayName());
prop.put("group", option.getGroup());
prop.put("label", option.getLabel());
prop.put("required", option.isRequired());
prop.put("type", option.getType());
prop.put("javaType", option.getJavaType());
prop.put("enum", option.getEnums());
prop.put("oneOf", option.getOneOfs());
prop.put("prefix", option.getPrefix());
prop.put("optionalPrefix", option.getOptionalPrefix());
prop.put("multiValue", option.isMultiValue());
prop.put("deprecated", option.isDeprecated());
prop.put("deprecationNote", option.getDeprecationNote());
prop.put("autowired", option.isAutowired());
prop.put("secret", option.isSecret());
if (option.getDefaultValue() != null) {
prop.put("defaultValue", option.resolveDefaultValue());
}
if (option.isSupportFileReference()) {
// only include if supported to not regen all files
prop.put("supportFileReference", option.isSupportFileReference());
}
if (option.isLargeInput()) {
// only include if supported to not regen all files
prop.put("largeInput", option.isLargeInput());
}
if (!Strings.isNullOrEmpty(option.getInputLanguage())) {
// only include if supported to not regen all files
prop.put("inputLanguage", option.getInputLanguage());
}
if (option.isImportant()) {
// only include if supported to not regen all files
prop.put("important", option.isImportant());
}
prop.put("asPredicate", option.isAsPredicate());
prop.put("configurationClass", option.getConfigurationClass());
prop.put("configurationField", option.getConfigurationField());
prop.put("description", option.getDescription());
prop.put("getterMethod", option.getGetterMethod());
prop.put("setterMethod", option.getSetterMethod());
if (option instanceof ComponentModel.ApiOptionModel) {
prop.put("optional", ((ComponentModel.ApiOptionModel) option).isOptional());
} else if (option instanceof ComponentModel.EndpointHeaderModel) {
prop.put("constantName", ((ComponentModel.EndpointHeaderModel) option).getConstantName());
}
prop.entrySet().removeIf(e -> e.getValue() == null);
prop.remove("prefix", "");
prop.remove("optionalPrefix", "");
prop.remove("defaultValue", "");
prop.remove("multiValue", Boolean.FALSE);
prop.remove("asPredicate", Boolean.FALSE);
return prop;
}
public static MainModel generateMainModel(String json) {
JsonObject obj = deserialize(json);
return generateMainModel(obj);
}
public static MainModel generateMainModel(JsonObject obj) {
MainModel model = new MainModel();
JsonArray mgrp = (JsonArray) obj.get("groups");
for (Object entry : mgrp) {
JsonObject mg = (JsonObject) entry;
MainGroupModel group = new MainGroupModel();
parseGroup(mg, group);
model.addGroup(group);
}
JsonArray mprp = (JsonArray) obj.get("properties");
for (Object entry : mprp) {
JsonObject mp = (JsonObject) entry;
MainOptionModel option = new MainOptionModel();
parseOption(mp, option, mp.getString("name"));
option.setSourceType(mp.getString("sourceType"));
model.addOption(option);
}
return model;
}
public static JBangModel generateJBangModel(String json) {
JsonObject obj = deserialize(json);
return generateJBangModel(obj);
}
public static JBangModel generateJBangModel(JsonObject obj) {
JBangModel model = new JBangModel();
JsonArray mgrp = (JsonArray) obj.get("groups");
for (Object entry : mgrp) {
JsonObject mg = (JsonObject) entry;
JBangGroupModel group = new JBangGroupModel();
parseGroup(mg, group);
model.addGroup(group);
}
JsonArray mprp = (JsonArray) obj.get("properties");
for (Object entry : mprp) {
JsonObject mp = (JsonObject) entry;
JBangOptionModel option = new JBangOptionModel();
parseOption(mp, option, mp.getString("name"));
option.setSourceType(mp.getString("sourceType"));
model.addOption(option);
}
return model;
}
public static JsonObject asJsonObject(MainModel model) {
JsonObject json = new JsonObject();
JsonArray groups = new JsonArray();
for (MainGroupModel group : model.getGroups()) {
JsonObject j = new JsonObject();
j.put("name", group.getName());
if (group.getDescription() != null) {
j.put("description", group.getDescription());
}
if (group.getSourceType() != null) {
j.put("sourceType", group.getSourceType());
}
groups.add(j);
}
json.put("groups", groups);
JsonArray props = new JsonArray();
for (MainOptionModel prop : model.getOptions()) {
JsonObject j = new JsonObject();
j.put("name", prop.getName());
j.put("required", prop.isRequired());
if (prop.getDescription() != null) {
j.put("description", prop.getDescription());
}
if (prop.getGroup() != null) {
j.put("group", prop.getGroup());
}
if (prop.getLabel() != null) {
j.put("label", prop.getLabel());
}
if (prop.getSourceType() != null) {
j.put("sourceType", prop.getSourceType());
}
j.put("type", prop.getType());
j.put("javaType", prop.getJavaType());
if (prop.getDefaultValue() != null) {
j.put("defaultValue", prop.resolveDefaultValue());
}
j.put("secret", prop.isSecret());
if (prop.getEnums() != null) {
j.put("enum", prop.getEnums());
}
if (prop.isDeprecated()) {
j.put("deprecated", prop.isDeprecated());
}
if (prop.isAutowired()) {
j.put("autowired", prop.isAutowired());
}
props.add(j);
}
json.put("properties", props);
return json;
}
public static JsonObject asJsonObject(JBangModel model) {
JsonObject json = new JsonObject();
JsonArray groups = new JsonArray();
for (JBangGroupModel group : model.getGroups()) {
JsonObject j = new JsonObject();
j.put("name", group.getName());
if (group.getDescription() != null) {
j.put("description", group.getDescription());
}
if (group.getSourceType() != null) {
j.put("sourceType", group.getSourceType());
}
groups.add(j);
}
json.put("groups", groups);
JsonArray props = new JsonArray();
for (JBangOptionModel prop : model.getOptions()) {
JsonObject j = new JsonObject();
j.put("name", prop.getName());
j.put("required", prop.isRequired());
if (prop.getDescription() != null) {
j.put("description", prop.getDescription());
}
if (prop.getGroup() != null) {
j.put("group", prop.getGroup());
}
if (prop.getLabel() != null) {
j.put("label", prop.getLabel());
}
if (prop.getSourceType() != null) {
j.put("sourceType", prop.getSourceType());
}
j.put("type", prop.getType());
j.put("javaType", prop.getJavaType());
if (prop.getDefaultValue() != null) {
j.put("defaultValue", prop.resolveDefaultValue());
}
j.put("secret", prop.isSecret());
if (prop.getEnums() != null) {
j.put("enum", prop.getEnums());
}
if (prop.isDeprecated()) {
j.put("deprecated", prop.isDeprecated());
}
if (prop.isAutowired()) {
j.put("autowired", prop.isAutowired());
}
props.add(j);
}
json.put("properties", props);
return json;
}
public static JsonObject asJsonObject(ReleaseModel model) {
JsonObject json = new JsonObject();
json.put("version", model.getVersion());
json.put("date", model.getDate());
if (model.getEol() != null) {
json.put("eol", model.getEol());
}
if (model.getKind() != null) {
json.put("kind", model.getKind());
}
if (model.getJdk() != null) {
json.put("jdk", model.getJdk());
}
return json;
}
public static ReleaseModel generateReleaseModel(JsonObject obj) {
ReleaseModel model = new ReleaseModel();
model.setVersion(obj.getString("version"));
model.setDate(obj.getString("date"));
model.setEol(obj.getString("eol"));
model.setKind(obj.getString("kind"));
model.setJdk(obj.getString("jdk"));
return model;
}
public static String createJsonSchema(MainModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static String createJsonSchema(JBangModel model) {
JsonObject wrapper = asJsonObject(model);
return serialize(wrapper);
}
public static JsonObject deserialize(String json) {
try {
return (JsonObject) Jsoner.deserialize(json);
} catch (Exception e) {
// wrap parsing exceptions as runtime
throw new RuntimeException("Cannot parse json", e);
}
}
public static String serialize(Object json) {
return Jsoner.prettyPrint(Jsoner.serialize(json), 2, 2);
}
protected static List<String> asStringList(Collection<?> col) {
if (col != null) {
return col.stream().map(Object::toString).collect(Collectors.toList());
} else {
return null;
}
}
}
| JsonMapper |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2300/Issue2300.java | {
"start": 1014,
"end": 1108
} | class ____ {
@JSONField(format = "unixtime")
public Date createTime;
}
}
| Order |
java | apache__camel | components/camel-jackson/src/test/java/org/apache/camel/component/jackson/JacksonObjectMapperRegistryTest.java | {
"start": 1306,
"end": 2659
} | class ____ extends CamelTestSupport {
private JacksonDataFormat df;
@BindToRegistry("myMapper")
private ObjectMapper objectMapper = new ObjectMapper();
@Test
public void testMarshalAndUnmarshalMap() throws Exception {
Map<String, Object> in = new HashMap<>();
in.put("name", "Camel");
MockEndpoint mock = getMockEndpoint("mock:reverse");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(Map.class);
mock.message(0).body().isEqualTo(in);
Object marshalled = template.requestBody("direct:in", in);
String marshalledAsString = context.getTypeConverter().convertTo(String.class, marshalled);
assertEquals("{\"name\":\"Camel\"}", marshalledAsString);
template.sendBody("direct:back", marshalled);
mock.assertIsSatisfied();
assertSame(objectMapper, df.getObjectMapper());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
df = new JacksonDataFormat();
df.setAutoDiscoverObjectMapper(true);
from("direct:in").marshal(df);
from("direct:back").unmarshal(df).to("mock:reverse");
}
};
}
}
| JacksonObjectMapperRegistryTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalUtil.java | {
"start": 2206,
"end": 3117
} | class ____ {
private static final DynMethods.UnboundMethod GET_NUMBER_UNBOUND_METHOD =
new DynMethods.Builder("getNumber").impl(JDK_SIGNAL_CLAZZ).build();
private static final DynMethods.UnboundMethod GET_NAME_UNBOUND_METHOD =
new DynMethods.Builder("getName").impl(JDK_SIGNAL_CLAZZ).build();
private final Object/* sun.misc.Signal */ delegate;
private final DynMethods.BoundMethod getNumberMethod;
private final DynMethods.BoundMethod getNameMethod;
public Signal(String name) {
Preconditions.checkNotNull(name);
this.delegate = JDK_SIGNAL_CTOR.newInstance(name);
this.getNumberMethod = GET_NUMBER_UNBOUND_METHOD.bind(delegate);
this.getNameMethod = GET_NAME_UNBOUND_METHOD.bind(delegate);
}
public Signal(Object delegate) {
Preconditions.checkArgument(JDK_SIGNAL_CLAZZ.isInstance(delegate),
String.format("Expected | Signal |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/InheritanceState.java | {
"start": 9618,
"end": 12589
} | class ____ declare or inherit at least one '@Id' or '@EmbeddedId' property)"
);
}
private void getMappedSuperclassesTillNextEntityOrdered() {
//ordered to allow proper messages on properties subclassing
ClassDetails currentClassInHierarchy = classDetails;
InheritanceState superclassState;
do {
classesToProcessForMappedSuperclass.add( 0, currentClassInHierarchy );
ClassDetails superClass = currentClassInHierarchy;
do {
superClass = superClass.getSuperClass();
superclassState = inheritanceStatePerClass.get( superClass );
}
while ( superClass != null
&& !OBJECT_CLASS_NAME.equals( superClass.getClassName() )
&& superclassState == null );
currentClassInHierarchy = superClass;
}
while ( superclassState != null && superclassState.isEmbeddableSuperclass() );
}
private void addMappedSuperClassInMetadata(Component component) {
final var mappedSuperclass = processMappedSuperclass( component.getTable() );
if ( mappedSuperclass != null ) {
component.setMappedSuperclass( mappedSuperclass );
}
}
private void addMappedSuperClassInMetadata(PersistentClass persistentClass) {
final var mappedSuperclass = processMappedSuperclass( persistentClass.getImplicitTable() );
if ( mappedSuperclass != null ) {
persistentClass.setSuperMappedSuperclass( mappedSuperclass );
}
}
private org.hibernate.mapping.MappedSuperclass processMappedSuperclass(Table implicitTable) {
//add @MappedSuperclass in the metadata
// classes from 0 to n-1 are @MappedSuperclass and should be linked
final var metadataCollector = buildingContext.getMetadataCollector();
final var superEntityState = getInheritanceStateOfSuperEntity( classDetails, inheritanceStatePerClass );
final PersistentClass superEntity =
superEntityState != null
? metadataCollector.getEntityBinding( superEntityState.getClassDetails().getName() )
: null;
final int lastMappedSuperclass = classesToProcessForMappedSuperclass.size() - 1;
org.hibernate.mapping.MappedSuperclass mappedSuperclass = null;
for ( int index = 0; index < lastMappedSuperclass; index++ ) {
final var parentSuperclass = mappedSuperclass;
// todo (jpa32) : causes the mapped-superclass Class reference to be loaded...
// - but this is how it's always worked, so...
final var mappedSuperclassDetails = classesToProcessForMappedSuperclass.get( index );
final var mappedSuperclassJavaType = mappedSuperclassDetails.toJavaClass();
//add MappedSuperclass if not already there
mappedSuperclass = metadataCollector.getMappedSuperclass( mappedSuperclassJavaType );
if ( mappedSuperclass == null ) {
mappedSuperclass = new org.hibernate.mapping.MappedSuperclass( parentSuperclass, superEntity, implicitTable );
mappedSuperclass.setMappedClass( mappedSuperclassJavaType );
metadataCollector.addMappedSuperclass( mappedSuperclassJavaType, mappedSuperclass );
}
}
return mappedSuperclass;
}
public static final | must |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/Association.java | {
"start": 309,
"end": 596
} | interface ____ extends Fetchable {
/**
* The descriptor, allowing access to column(s), etc
*/
ForeignKeyDescriptor getForeignKeyDescriptor();
/**
* Indicates which "side" of the foreign-key this association describes
*/
ForeignKeyDescriptor.Nature getSideNature();
}
| Association |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java | {
"start": 1238,
"end": 6171
} | class ____ implements Writeable, ToXContentObject {
public static final ParseField TYPE = new ParseField("type");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField BREAKDOWN = new ParseField("breakdown");
public static final ParseField DEBUG = new ParseField("debug");
static final ParseField NODE_TIME = new ParseField("time");
public static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos");
public static final ParseField CHILDREN = new ParseField("children");
private final String type;
private final String description;
private final Map<String, Long> breakdown;
private final Map<String, Object> debug;
private final long nodeTime;
private final List<ProfileResult> children;
public ProfileResult(
String type,
String description,
Map<String, Long> breakdown,
Map<String, Object> debug,
long nodeTime,
List<ProfileResult> children
) {
this.type = type;
this.description = description;
this.breakdown = Objects.requireNonNull(breakdown, "required breakdown argument missing");
this.debug = debug == null ? Map.of() : debug;
this.children = children == null ? List.of() : children;
this.nodeTime = nodeTime;
}
/**
* Read from a stream.
*/
public ProfileResult(StreamInput in) throws IOException {
this.type = in.readString();
this.description = in.readString();
this.nodeTime = in.readLong();
breakdown = in.readMap(StreamInput::readLong);
debug = in.readMap(StreamInput::readGenericValue);
children = in.readCollectionAsList(ProfileResult::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(type);
out.writeString(description);
out.writeLong(nodeTime); // not Vlong because can be negative
out.writeMap(breakdown, StreamOutput::writeLong);
out.writeMap(debug, StreamOutput::writeGenericValue);
out.writeCollection(children);
}
/**
* Retrieve the lucene description of this query (e.g. the "explain" text)
*/
public String getLuceneDescription() {
return description;
}
/**
* Retrieve the name of the entry (e.g. "TermQuery" or "LongTermsAggregator")
*/
public String getQueryName() {
return type;
}
/**
* The timing breakdown for this node.
*/
public Map<String, Long> getTimeBreakdown() {
return Collections.unmodifiableMap(breakdown);
}
/**
* The debug information about the profiled execution.
*/
public Map<String, Object> getDebugInfo() {
return Collections.unmodifiableMap(debug);
}
/**
* Returns the total time (inclusive of children) for this query node.
*
* @return elapsed time in nanoseconds
*/
public long getTime() {
return nodeTime;
}
/**
* Returns a list of all profiled children queries
*/
public List<ProfileResult> getProfiledChildren() {
return Collections.unmodifiableList(children);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(TYPE.getPreferredName(), type);
builder.field(DESCRIPTION.getPreferredName(), description);
if (builder.humanReadable()) {
builder.field(NODE_TIME.getPreferredName(), new TimeValue(getTime(), TimeUnit.NANOSECONDS).toString());
}
builder.field(NODE_TIME_RAW.getPreferredName(), getTime());
builder.field(BREAKDOWN.getPreferredName(), breakdown);
if (false == debug.isEmpty()) {
builder.field(DEBUG.getPreferredName(), debug);
}
if (false == children.isEmpty()) {
builder.startArray(CHILDREN.getPreferredName());
for (ProfileResult child : children) {
builder = child.toXContent(builder, params);
}
builder.endArray();
}
return builder.endObject();
}
@Override
public boolean equals(Object obj) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
ProfileResult other = (ProfileResult) obj;
return type.equals(other.type)
&& description.equals(other.description)
&& breakdown.equals(other.breakdown)
&& debug.equals(other.debug)
&& nodeTime == other.nodeTime
&& children.equals(other.children);
}
@Override
public int hashCode() {
return Objects.hash(type, description, breakdown, debug, nodeTime, children);
}
@Override
public String toString() {
return Strings.toString(this);
}
}
| ProfileResult |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/TruthIncompatibleTypeTest.java | {
"start": 5537,
"end": 5991
} | class ____ {
public void f(Iterable<Long> xs, String x) {
// BUG: Diagnostic contains:
assertThat(xs).contains(x);
}
}
""")
.doTest();
}
@Test
public void containment_noMatch() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
public | Test |
java | quarkusio__quarkus | independent-projects/bootstrap/core/src/test/java/io/quarkus/bootstrap/resolver/CollectDependenciesBase.java | {
"start": 602,
"end": 6876
} | class ____ extends ResolverSetupCleanup {
protected TsArtifact root;
protected List<Dependency> expectedResult = List.of();
protected List<Dependency> deploymentDeps = List.of();
@Override
@BeforeEach
public void setup() throws Exception {
super.setup();
root = new TsArtifact("root");
setupDependencies();
}
protected abstract void setupDependencies() throws Exception;
@Test
public void testCollectedDependencies() throws Exception {
install(root);
List<Dependency> expected;
if (deploymentDeps.isEmpty()) {
expected = expectedResult;
} else {
expected = new ArrayList<>(expectedResult.size() + deploymentDeps.size());
expected.addAll(expectedResult);
expected.addAll(deploymentDeps);
}
final Collection<ResolvedDependency> buildDeps = getTestResolver().resolveModel(root.toArtifact()).getDependencies();
assertThat(stripResolvedPaths(buildDeps)).containsExactlyInAnyOrderElementsOf(expected);
assertBuildDependencies(buildDeps);
}
protected void assertBuildDependencies(Collection<ResolvedDependency> buildDeps) {
}
private static List<Dependency> stripResolvedPaths(Collection<ResolvedDependency> deps) {
final List<Dependency> result = new ArrayList<>(deps.size());
for (var dep : deps) {
result.add(new ArtifactDependency(dep));
}
return result;
}
protected BootstrapAppModelResolver getTestResolver() throws Exception {
return resolver;
}
protected Path getInstallDir(TsArtifact artifact) {
return getInstallDir().resolve(artifact.getGroupId().replace('.', '/')).resolve(artifact.getArtifactId())
.resolve(artifact.getVersion());
}
protected TsArtifact install(TsArtifact dep, boolean collected) {
return install(dep, collected ? JavaScopes.COMPILE : null);
}
protected TsArtifact install(TsArtifact dep, String collectedInScope) {
return install(dep, null, collectedInScope, false);
}
protected TsArtifact install(TsArtifact dep, String collectedInScope, boolean optional) {
return install(dep, null, collectedInScope, optional);
}
protected TsArtifact install(TsArtifact dep, Path p, boolean collected) {
return install(dep, p, collected ? JavaScopes.COMPILE : null, false);
}
protected TsArtifact install(TsArtifact dep, Path p, String collectedInScope, boolean optional) {
install(dep, p);
if (collectedInScope != null) {
addCollectedDep(dep, collectedInScope, optional);
}
return dep;
}
protected TsQuarkusExt install(TsQuarkusExt ext) {
install(ext, true);
return ext;
}
protected void install(TsQuarkusExt ext, boolean collected) {
ext.install(repo);
if (collected) {
addCollectedDep(ext.getRuntime(), JavaScopes.COMPILE, false, DependencyFlags.RUNTIME_EXTENSION_ARTIFACT);
addCollectedDeploymentDep(ext.getDeployment());
}
}
protected void installAsDep(TsQuarkusExt ext) {
ext.install(repo);
root.addDependency(ext);
addCollectedDep(ext.getRuntime(), JavaScopes.COMPILE, false,
DependencyFlags.DIRECT | DependencyFlags.RUNTIME_EXTENSION_ARTIFACT
| DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT);
addCollectedDeploymentDep(ext.getDeployment());
}
protected void installAsDep(TsArtifact dep, int... flags) {
installAsDep(dep, true, flags);
}
protected void installAsDep(TsArtifact dep, boolean collected, int... flags) {
installAsDep(dep, null, collected, flags);
}
protected void installAsDep(TsArtifact dep, Path p, boolean collected, int... flags) {
installAsDep(new TsDependency(dep), p, collected, flags);
}
protected void installAsDep(TsDependency dep) {
installAsDep(dep, null);
}
protected void installAsDep(TsDependency dep, Path p) {
installAsDep(dep, p, true);
}
protected void installAsDep(TsDependency dep, boolean collected) {
installAsDep(dep, null, collected);
}
protected void installAsDep(TsDependency dep, Path p, boolean collected, int... flags) {
final TsArtifact artifact = dep.artifact;
install(artifact, p);
root.addDependency(dep);
if (!collected) {
return;
}
int allFlags = DependencyFlags.DIRECT;
for (int f : flags) {
allFlags |= f;
}
addCollectedDep(artifact, dep.scope == null ? JavaScopes.COMPILE : dep.scope, dep.optional, allFlags);
}
protected void addCollectedDep(final TsArtifact artifact, int... flags) {
addCollectedDep(artifact, JavaScopes.COMPILE, false, flags);
}
protected void addCollectedDep(final TsArtifact artifact, final String scope, boolean optional, int... flags) {
int allFlags = DependencyFlags.RUNTIME_CP | DependencyFlags.DEPLOYMENT_CP;
if (optional) {
allFlags |= DependencyFlags.OPTIONAL;
}
for (int f : flags) {
allFlags |= f;
}
if (expectedResult.isEmpty()) {
expectedResult = new ArrayList<>();
}
expectedResult.add(new ArtifactDependency(artifact.toArtifact(), scope, allFlags));
}
protected void addCollectedDeploymentDep(TsArtifact ext) {
if (deploymentDeps.isEmpty()) {
deploymentDeps = new ArrayList<>();
}
deploymentDeps
.add(new ArtifactDependency(ext.toArtifact(), JavaScopes.COMPILE,
DependencyFlags.DEPLOYMENT_CP));
}
protected void addManagedDep(TsQuarkusExt ext) {
addManagedDep(ext.runtime);
addManagedDep(ext.deployment);
}
protected void addManagedDep(TsArtifact dep) {
root.addManagedDependency(new TsDependency(dep));
}
protected void addDep(TsArtifact dep) {
root.addDependency(dep);
}
protected void setPomProperty(String name, String value) {
root.setPomProperty(name, value);
}
}
| CollectDependenciesBase |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java | {
"start": 2826,
"end": 3191
} | enum ____.
*
* @param columnFamily that this column is stored in.
* @param columnPrefix for this column.
*/
private ApplicationColumnPrefix(ColumnFamily<ApplicationTable> columnFamily,
String columnPrefix) {
this(columnFamily, columnPrefix, GenericConverter.getInstance());
}
/**
* Private constructor, meant to be used by the | definition |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/core/DockerCli.java | {
"start": 1496,
"end": 4672
} | class ____ {
private static final Map<@Nullable File, DockerCommands> dockerCommandsCache = new HashMap<>();
private static final Log logger = LogFactory.getLog(DockerCli.class);
private final ProcessRunner processRunner;
private final DockerCommands dockerCommands;
private final DockerComposeOptions dockerComposeOptions;
private final ComposeVersion composeVersion;
/**
* Create a new {@link DockerCli} instance.
* @param workingDirectory the working directory or {@code null}
* @param dockerComposeOptions the Docker Compose options to use or {@code null}.
*/
DockerCli(@Nullable File workingDirectory, @Nullable DockerComposeOptions dockerComposeOptions) {
this.processRunner = new ProcessRunner(workingDirectory);
this.dockerCommands = dockerCommandsCache.computeIfAbsent(workingDirectory,
(key) -> new DockerCommands(this.processRunner));
this.dockerComposeOptions = (dockerComposeOptions != null) ? dockerComposeOptions : DockerComposeOptions.none();
this.composeVersion = ComposeVersion.of(this.dockerCommands.get(Type.DOCKER_COMPOSE).version());
}
/**
* Run the given {@link DockerCli} command and return the response.
* @param <R> the response type
* @param dockerCommand the command to run
* @return the response
*/
<R> R run(DockerCliCommand<R> dockerCommand) {
List<String> command = createCommand(dockerCommand.getType());
command.addAll(dockerCommand.getCommand(this.composeVersion));
Consumer<String> outputConsumer = createOutputConsumer(dockerCommand.getLogLevel());
String json = this.processRunner.run(outputConsumer, command.toArray(new String[0]));
return dockerCommand.deserialize(json);
}
private @Nullable Consumer<String> createOutputConsumer(@Nullable LogLevel logLevel) {
if (logLevel == null || logLevel == LogLevel.OFF) {
return null;
}
return (line) -> logLevel.log(logger, line);
}
private List<String> createCommand(Type type) {
return switch (type) {
case DOCKER -> new ArrayList<>(this.dockerCommands.get(type).command());
case DOCKER_COMPOSE -> {
List<String> result = new ArrayList<>(this.dockerCommands.get(type).command());
DockerComposeFile composeFile = this.dockerComposeOptions.composeFile();
if (composeFile != null) {
for (File file : composeFile.getFiles()) {
result.add("--file");
result.add(file.getPath());
}
}
result.add("--ansi");
result.add("never");
Set<String> activeProfiles = this.dockerComposeOptions.activeProfiles();
if (!CollectionUtils.isEmpty(activeProfiles)) {
for (String profile : activeProfiles) {
result.add("--profile");
result.add(profile);
}
}
List<String> arguments = this.dockerComposeOptions.arguments();
if (!CollectionUtils.isEmpty(arguments)) {
result.addAll(arguments);
}
yield result;
}
};
}
/**
* Return the {@link DockerComposeFile} being used by this CLI instance.
* @return the Docker Compose file
*/
@Nullable DockerComposeFile getDockerComposeFile() {
return this.dockerComposeOptions.composeFile();
}
/**
* Holds details of the actual CLI commands to invoke.
*/
private static | DockerCli |
java | apache__flink | flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManagerDriver.java | {
"start": 28751,
"end": 30809
} | enum ____ a YARN application status enum.
*
* @param status The Flink application status.
* @return The corresponding YARN application status.
*/
private FinalApplicationStatus getYarnStatus(ApplicationStatus status) {
if (status == null) {
return FinalApplicationStatus.UNDEFINED;
} else {
switch (status) {
case SUCCEEDED:
return FinalApplicationStatus.SUCCEEDED;
case FAILED:
return FinalApplicationStatus.FAILED;
case CANCELED:
return FinalApplicationStatus.KILLED;
default:
return FinalApplicationStatus.UNDEFINED;
}
}
}
@VisibleForTesting
private static ResourceID getContainerResourceId(Container container) {
return new ResourceID(container.getId().toString(), container.getNodeId().toString());
}
private Map<Priority, List<Container>> groupContainerByPriority(List<Container> containers) {
return containers.stream().collect(Collectors.groupingBy(Container::getPriority));
}
private void checkInitialized() {
Preconditions.checkState(
taskExecutorProcessSpecContainerResourcePriorityAdapter != null,
"Driver not initialized.");
}
// ------------------------------------------------------------------------
// Callback handlers
// ------------------------------------------------------------------------
private void runAsyncWithFatalHandler(Runnable runnable) {
getMainThreadExecutor()
.execute(
() -> {
try {
runnable.run();
} catch (Throwable t) {
if (isRunning) {
getResourceEventHandler().onError(t);
}
}
});
}
| to |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/clause/MySqlFormatName.java | {
"start": 680,
"end": 731
} | enum ____ {
TRADITIONAL,
JSON
}
| MySqlFormatName |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java | {
"start": 18165,
"end": 18347
} | interface ____ {
ParseField MANAGE = new ParseField("manage");
ParseField APPLICATIONS = new ParseField("applications");
}
}
public static | Fields |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/client/AbstractMockWebServerTests.java | {
"start": 1235,
"end": 1672
} | class ____ {
private MockWebServer server;
protected int port;
protected String baseUrl;
@BeforeEach
void setUp() throws Exception {
this.server = new MockWebServer();
this.server.setDispatcher(new TestDispatcher());
this.server.start();
this.port = this.server.getPort();
this.baseUrl = "http://localhost:" + this.port;
}
@AfterEach
void tearDown() {
this.server.close();
}
protected | AbstractMockWebServerTests |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/DefaultHttpClientConfiguration.java | {
"start": 5620,
"end": 5912
} | class ____ extends ConnectionPoolConfiguration {
}
/**
* The default WebSocket compression configuration.
*/
@ConfigurationProperties(WebSocketCompressionConfiguration.PREFIX)
@BootstrapContextCompatible
@Primary
public static | DefaultConnectionPoolConfiguration |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/PersistenceUnitExtension.java | {
"start": 1495,
"end": 1564
} | interface ____ {
PersistenceUnitExtension[] value();
}
}
| List |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/operators/windowing/functions/InternalAggregateProcessAsyncWindowFunction.java | {
"start": 2112,
"end": 4773
} | class ____<T, ACC, V, R, K, W extends Window>
extends WrappingFunction<ProcessWindowFunction<V, R, K, W>>
implements InternalAsyncWindowFunction<StateIterator<T>, R, K, W> {
private static final long serialVersionUID = 1L;
private final AggregateFunction<T, ACC, V> aggFunction;
public InternalAggregateProcessAsyncWindowFunction(
AggregateFunction<T, ACC, V> aggFunction,
ProcessWindowFunction<V, R, K, W> windowFunction) {
super(windowFunction);
this.aggFunction = aggFunction;
}
@Override
public StateFuture<Void> process(
K key,
final W window,
final InternalWindowContext context,
StateIterator<T> input,
Collector<R> out)
throws Exception {
InternalProcessWindowContext<V, R, K, W> ctx =
new InternalProcessWindowContext<>(wrappedFunction);
ctx.window = window;
ctx.internalContext = context;
AtomicReference<ACC> finalAcc = new AtomicReference<>(aggFunction.createAccumulator());
return input.onNext(
val -> {
finalAcc.set(aggFunction.add(val, finalAcc.get()));
})
.thenAccept(
ignore -> {
ProcessWindowFunction<V, R, K, W> wrappedFunction =
this.wrappedFunction;
wrappedFunction.process(
key,
ctx,
Collections.singletonList(
aggFunction.getResult(finalAcc.get())),
out);
});
}
@Override
public StateFuture<Void> clear(final W window, final InternalWindowContext context)
throws Exception {
InternalProcessWindowContext<V, R, K, W> ctx =
new InternalProcessWindowContext<>(wrappedFunction);
ctx.window = window;
ctx.internalContext = context;
ProcessWindowFunction<V, R, K, W> wrappedFunction = this.wrappedFunction;
wrappedFunction.clear(ctx);
return StateFutureUtils.completedVoidFuture();
}
@Override
public RuntimeContext getRuntimeContext() {
throw new RuntimeException("This should never be called.");
}
@Override
public IterationRuntimeContext getIterationRuntimeContext() {
throw new RuntimeException("This should never be called.");
}
}
| InternalAggregateProcessAsyncWindowFunction |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/issues/Issue5412.java | {
"start": 712,
"end": 5183
} | class ____ {
@Test
public void test_analyze_emptytable() throws Exception {
for (DbType dbType : new DbType[]{DbType.postgresql, DbType.greenplum, DbType.edb}) {
for (String sql : new String[]{
"analyze ",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
SQLStatement statement = parser.parseStatement();
System.out.println(dbType + "原始的sql===" + sql);
System.out.println(dbType + "生成的sql===" + statement);
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(dbType);
statement.accept(visitor);
System.out.println(dbType + "getTables==" + visitor.getTables());
Map<TableStat.Name, TableStat> tableMap = visitor.getTables();
assertTrue(tableMap.isEmpty());
}
}
}
@Test
public void test_analyze() throws Exception {
for (DbType dbType : new DbType[]{DbType.postgresql, DbType.greenplum, DbType.edb}) {
for (String sql : new String[]{
"analyze WORK.TABLE1;",
"analyze WORK.TABLE1,WORK.TABLE2;",
"analyze VERBOSE WORK.TABLE1,WORK.TABLE2;",
"analyze VERBOSE SKIP_LOCKED WORK.TABLE1;",
"analyze SKIP_LOCKED WORK.TABLE1;",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
SQLStatement statement = parser.parseStatement();
System.out.println(dbType + "原始的sql===" + sql);
System.out.println(dbType + "生成的sql===" + statement);
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(dbType);
statement.accept(visitor);
System.out.println(dbType + "getTables==" + visitor.getTables());
Map<TableStat.Name, TableStat> tableMap = visitor.getTables();
assertFalse(tableMap.isEmpty());
}
}
}
@Test
public void test_vacuum_empty() throws Exception {
for (DbType dbType : new DbType[]{DbType.postgresql, DbType.greenplum, DbType.edb}) {
for (String sql : new String[]{
"vacuum ",
"vacuum ;vacuum ",
"vacuum;vacuum;vacuum bb;",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statementList = parser.parseStatementList();
System.out.println(dbType + "原始的sql===" + sql);
System.out.println(dbType + "生成的sql===" + statementList);
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(dbType);
statementList.get(0).accept(visitor);
Map<TableStat.Name, TableStat> tableMap = visitor.getTables();
assertTrue(tableMap.isEmpty());
}
}
}
@Test
public void test_vacuum() throws Exception {
for (DbType dbType : new DbType[]{DbType.postgresql, DbType.greenplum, DbType.edb}) {
for (String sql : new String[]{
"vacuum WORK.TABLE1;",
"vacuum WORK.TABLE1,WORK.TABLE2;",
"vacuum VERBOSE WORK.TABLE1,WORK.TABLE2;",
"vacuum VERBOSE FULL WORK.TABLE1,WORK.TABLE2;",
"vacuum VERBOSE FREEZE WORK.TABLE1,WORK.TABLE2,WORK.TABLE3;",
"vacuum VERBOSE ANALYZE SKIP_LOCKED PROCESS_TOAST WORK.TABLE1;",
"vacuum SKIP_LOCKED TRUNCATE WORK.TABLE1;",
"VACUUM FULL FREEZE VERBOSE ANALYZE DISABLE_PAGE_SKIPPING SKIP_LOCKED PROCESS_TOAST TRUNCATE WORK.TABLE3;",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
SQLStatement statement = parser.parseStatement();
System.out.println(dbType + "原始的sql===" + sql);
System.out.println(dbType + "生成的sql===" + statement);
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(dbType);
statement.accept(visitor);
System.out.println(dbType + "getTables==" + visitor.getTables());
Map<TableStat.Name, TableStat> tableMap = visitor.getTables();
assertFalse(tableMap.isEmpty());
}
}
}
}
| Issue5412 |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/AbstractBulkApiTestBase.java | {
"start": 1187,
"end": 3676
} | class ____ extends AbstractSalesforceTestBase {
protected JobInfo createJob(JobInfo jobInfo) {
jobInfo = template().requestBody("direct:createJob", jobInfo, JobInfo.class);
assertNotNull(jobInfo.getId(), "Missing JobId");
return jobInfo;
}
@Override
protected RouteBuilder doCreateRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
// test createJob
from("direct:createJob").to("salesforce://createJob");
// test getJob
from("direct:getJob").to("salesforce:getJob");
// test closeJob
from("direct:closeJob").to("salesforce:closeJob");
// test abortJob
from("direct:abortJob").to("salesforce:abortJob");
// test createBatch
from("direct:createBatch").to("salesforce:createBatch");
// test getBatch
from("direct:getBatch").to("salesforce:getBatch");
// test getAllBatches
from("direct:getAllBatches").to("salesforce:getAllBatches");
// test getRequest
from("direct:getRequest").to("salesforce:getRequest");
// test getResults
from("direct:getResults").to("salesforce:getResults");
// test createBatchQuery
from("direct:createBatchQuery")
.to("salesforce:createBatchQuery?sObjectQuery=SELECT Name, Description__c, Price__c, Total_Inventory__c FROM Merchandise__c WHERE Name LIKE '%25Bulk API%25'");
// test getQueryResultIds
from("direct:getQueryResultIds").to("salesforce:getQueryResultIds");
// test getQueryResult
from("direct:getQueryResult").to("salesforce:getQueryResult");
}
};
}
protected boolean batchProcessed(BatchInfo batchInfo) {
BatchStateEnum state = batchInfo.getState();
return !(state == BatchStateEnum.QUEUED || state == BatchStateEnum.IN_PROGRESS);
}
protected BatchInfo getBatchInfo(BatchInfo batchInfo) {
batchInfo = template().requestBody("direct:getBatch", batchInfo, BatchInfo.class);
assertNotNull(batchInfo, "Null batch");
assertNotNull(batchInfo.getId(), "Null batch id");
return batchInfo;
}
}
| AbstractBulkApiTestBase |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 37561,
"end": 37859
} | class ____ {
public void doTest() {
Client client = new Client();
int x = client.multiply(5 + 3, 10) * 5;
}
}
""")
.addOutputLines(
"out/Caller.java",
"""
public final | Caller |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java | {
"start": 24661,
"end": 25080
} | class ____ {
private final OptionalDouble actual = OptionalDouble.empty();
@Test
void should_run_test_when_assumption_passes() {
thenCode(() -> given(actual).isEmpty()).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_fails() {
expectAssumptionNotMetException(() -> given(actual).isNotEmpty());
}
}
@Nested
| BDDAssumptions_given_OptionalDouble_Test |
java | spring-projects__spring-security | test/src/test/java/org/springframework/security/test/web/servlet/request/SecurityMockMvcRequestPostProcessorsCsrfTests.java | {
"start": 7697,
"end": 8121
} | class ____ extends HttpServletRequestWrapper {
HttpSession session = new MockHttpSession();
SessionRequestWrapper(HttpServletRequest request) {
super(request);
}
@Override
public HttpSession getSession(boolean create) {
return this.session;
}
@Override
public HttpSession getSession() {
return this.session;
}
}
}
@Configuration
@EnableWebSecurity
static | SessionRequestWrapper |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authorization/method/PostFilterAuthorizationMethodInterceptor.java | {
"start": 1821,
"end": 5037
} | class ____ implements AuthorizationAdvisor {
private Supplier<SecurityContextHolderStrategy> securityContextHolderStrategy = SecurityContextHolder::getContextHolderStrategy;
private PostFilterExpressionAttributeRegistry registry = new PostFilterExpressionAttributeRegistry();
private int order = AuthorizationInterceptorsOrder.POST_FILTER.getOrder();
private final Pointcut pointcut;
/**
* Creates a {@link PostFilterAuthorizationMethodInterceptor} using the provided
* parameters
*/
public PostFilterAuthorizationMethodInterceptor() {
this.pointcut = AuthorizationMethodPointcuts.forAnnotations(PostFilter.class);
}
/**
* Use this {@link MethodSecurityExpressionHandler}.
* @param expressionHandler the {@link MethodSecurityExpressionHandler} to use
*/
public void setExpressionHandler(MethodSecurityExpressionHandler expressionHandler) {
this.registry.setExpressionHandler(expressionHandler);
}
/**
* Configure pre/post-authorization template resolution
* <p>
* By default, this value is <code>null</code>, which indicates that templates should
* not be resolved.
* @param defaults - whether to resolve pre/post-authorization templates parameters
* @since 6.4
*/
public void setTemplateDefaults(AnnotationTemplateExpressionDefaults defaults) {
this.registry.setTemplateDefaults(defaults);
}
/**
* {@inheritDoc}
*/
@Override
public int getOrder() {
return this.order;
}
public void setOrder(int order) {
this.order = order;
}
/**
* {@inheritDoc}
*/
@Override
public Pointcut getPointcut() {
return this.pointcut;
}
@Override
public Advice getAdvice() {
return this;
}
@Override
public boolean isPerInstance() {
return true;
}
/**
* Sets the {@link SecurityContextHolderStrategy} to use. The default action is to use
* the {@link SecurityContextHolderStrategy} stored in {@link SecurityContextHolder}.
*
* @since 5.8
*/
public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy strategy) {
this.securityContextHolderStrategy = () -> strategy;
}
/**
* Filter a {@code returnedObject} using the {@link PostFilter} annotation that the
* {@link MethodInvocation} specifies.
* @param mi the {@link MethodInvocation} to check check
* @return filtered {@code returnedObject}
*/
@Override
public @Nullable Object invoke(MethodInvocation mi) throws Throwable {
Object returnedObject = mi.proceed();
ExpressionAttribute attribute = this.registry.getAttribute(mi);
if (attribute == null) {
return returnedObject;
}
MethodSecurityExpressionHandler expressionHandler = this.registry.getExpressionHandler();
EvaluationContext ctx = expressionHandler.createEvaluationContext(this::getAuthentication, mi);
return expressionHandler.filter(returnedObject, attribute.getExpression(), ctx);
}
private Authentication getAuthentication() {
Authentication authentication = this.securityContextHolderStrategy.get().getContext().getAuthentication();
if (authentication == null) {
throw new AuthenticationCredentialsNotFoundException(
"An Authentication object was not found in the SecurityContext");
}
return authentication;
}
}
| PostFilterAuthorizationMethodInterceptor |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/HeadersConfigurer.java | {
"start": 33019,
"end": 33506
} | class ____ {
private ReferrerPolicyHeaderWriter writer;
private ReferrerPolicyConfig() {
}
/**
* Sets the policy to be used in the response header.
* @param policy a referrer policy
* @return the {@link ReferrerPolicyConfig} for additional configuration
* @throws IllegalArgumentException if policy is null
*/
public ReferrerPolicyConfig policy(ReferrerPolicy policy) {
this.writer.setPolicy(policy);
return this;
}
}
public final | ReferrerPolicyConfig |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/codec/ServerSentEventHttpMessageReader.java | {
"start": 1720,
"end": 7204
} | class ____ implements HttpMessageReader<Object> {
private static final ResolvableType STRING_TYPE = ResolvableType.forClass(String.class);
private final @Nullable Decoder<?> decoder;
private final StringDecoder lineDecoder = StringDecoder.textPlainOnly();
/**
* Constructor without a {@code Decoder}. In this mode only {@code String}
* is supported as the data of an event.
*/
public ServerSentEventHttpMessageReader() {
this(null);
}
/**
* Constructor with JSON {@code Decoder} for decoding to Objects.
* Support for decoding to {@code String} event data is built-in.
*/
public ServerSentEventHttpMessageReader(@Nullable Decoder<?> decoder) {
this.decoder = decoder;
}
/**
* Return the configured {@code Decoder}.
*/
public @Nullable Decoder<?> getDecoder() {
return this.decoder;
}
/**
* Configure a limit on the maximum number of bytes per SSE event which are
* buffered before the event is parsed.
* <p>Note that the {@link #getDecoder() data decoder}, if provided, must
* also be customized accordingly to raise the limit if necessary in order
* to be able to parse the data portion of the event.
* <p>By default this is set to 256K.
* @param byteCount the max number of bytes to buffer, or -1 for unlimited
* @since 5.1.13
*/
public void setMaxInMemorySize(int byteCount) {
this.lineDecoder.setMaxInMemorySize(byteCount);
}
/**
* Return the {@link #setMaxInMemorySize configured} byte count limit.
* @since 5.1.13
*/
public int getMaxInMemorySize() {
return this.lineDecoder.getMaxInMemorySize();
}
@Override
public List<MediaType> getReadableMediaTypes() {
return Collections.singletonList(MediaType.TEXT_EVENT_STREAM);
}
@Override
public boolean canRead(ResolvableType elementType, @Nullable MediaType mediaType) {
return (MediaType.TEXT_EVENT_STREAM.includes(mediaType) || isServerSentEvent(elementType));
}
private boolean isServerSentEvent(ResolvableType elementType) {
return ServerSentEvent.class.isAssignableFrom(elementType.toClass());
}
@Override
public Flux<Object> read(
ResolvableType elementType, ReactiveHttpInputMessage message, Map<String, Object> hints) {
LimitTracker limitTracker = new LimitTracker();
boolean shouldWrap = isServerSentEvent(elementType);
ResolvableType valueType = (shouldWrap ? elementType.getGeneric() : elementType);
return this.lineDecoder.decode(message.getBody(), STRING_TYPE, null, hints)
.doOnNext(limitTracker::afterLineParsed)
.bufferUntil(String::isEmpty)
.concatMap(lines -> {
Object event = buildEvent(lines, valueType, shouldWrap, hints);
return (event != null ? Mono.just(event) : Mono.empty());
});
}
@SuppressWarnings("NullAway") // Dataflow analysis limitation
private @Nullable Object buildEvent(List<String> lines, ResolvableType valueType, boolean shouldWrap,
Map<String, Object> hints) {
ServerSentEvent.Builder<Object> sseBuilder = (shouldWrap ? ServerSentEvent.builder() : null);
StringBuilder data = null;
StringBuilder comment = null;
for (String line : lines) {
if (line.startsWith("data:")) {
data = (data != null ? data : new StringBuilder());
int length = line.length();
if (length > 5) {
int index = (line.charAt(5) != ' ' ? 5 : 6);
if (length > index) {
data.append(line, index, line.length());
}
}
data.append('\n');
}
else if (shouldWrap) {
if (line.startsWith("id:")) {
sseBuilder.id(line.substring(3).trim());
}
else if (line.startsWith("event:")) {
sseBuilder.event(line.substring(6).trim());
}
else if (line.startsWith("retry:")) {
sseBuilder.retry(Duration.ofMillis(Long.parseLong(line.substring(6).trim())));
}
else if (line.startsWith(":")) {
comment = (comment != null ? comment : new StringBuilder());
comment.append(line.substring(1).trim()).append('\n');
}
}
}
Object decodedData = (data != null ? decodeData(data, valueType, hints) : null);
if (shouldWrap) {
if (comment != null) {
sseBuilder.comment(comment.substring(0, comment.length() - 1));
}
if (decodedData != null) {
sseBuilder.data(decodedData);
}
return sseBuilder.build();
}
else {
return decodedData;
}
}
private @Nullable Object decodeData(StringBuilder data, ResolvableType dataType, Map<String, Object> hints) {
if (String.class == dataType.resolve()) {
return data.substring(0, data.length() - 1);
}
if (this.decoder == null) {
throw new CodecException("No SSE decoder configured and the data is not String.");
}
byte[] bytes = data.toString().getBytes(StandardCharsets.UTF_8);
DataBuffer buffer = DefaultDataBufferFactory.sharedInstance.wrap(bytes); // wrapping only, no allocation
return this.decoder.decode(buffer, dataType, MediaType.TEXT_EVENT_STREAM, hints);
}
@Override
public Mono<Object> readMono(
ResolvableType elementType, ReactiveHttpInputMessage message, Map<String, Object> hints) {
// In order of readers, we're ahead of String + "*/*"
// If this is called, simply delegate to StringDecoder
if (elementType.resolve() == String.class) {
Flux<DataBuffer> body = message.getBody();
return this.lineDecoder.decodeToMono(body, elementType, null, null).cast(Object.class);
}
return Mono.error(new UnsupportedOperationException(
"ServerSentEventHttpMessageReader only supports reading stream of events as a Flux"));
}
private | ServerSentEventHttpMessageReader |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/LifecycleExecutionException.java | {
"start": 1125,
"end": 3564
} | class ____ extends Exception {
private MavenProject project;
public LifecycleExecutionException(String message) {
super(message);
}
public LifecycleExecutionException(Throwable cause) {
super(cause);
}
public LifecycleExecutionException(String message, Throwable cause) {
super(message, cause);
}
public LifecycleExecutionException(String message, MavenProject project) {
super(message);
this.project = project;
}
public LifecycleExecutionException(String message, MojoExecution execution, MavenProject project) {
super(message);
this.project = project;
}
public LifecycleExecutionException(String message, MojoExecution execution, MavenProject project, Throwable cause) {
super(message, cause);
this.project = project;
}
public LifecycleExecutionException(MojoExecution execution, MavenProject project, Throwable cause) {
this(new DefaultMessageBuilderFactory(), execution, project, cause);
}
public LifecycleExecutionException(
MessageBuilderFactory messageBuilderFactory,
MojoExecution execution,
MavenProject project,
Throwable cause) {
this(createMessage(messageBuilderFactory, execution, project, cause), execution, project, cause);
}
public MavenProject getProject() {
return project;
}
private static String createMessage(
MessageBuilderFactory messageBuilderFactory,
MojoExecution execution,
MavenProject project,
Throwable cause) {
MessageBuilder buffer = messageBuilderFactory.builder(256);
buffer.a("Failed to execute goal");
if (execution != null) {
buffer.a(' ');
buffer.mojo(execution.getGroupId()
+ ':'
+ execution.getArtifactId()
+ ':'
+ execution.getVersion()
+ ':'
+ execution.getGoal());
buffer.a(' ').strong('(' + execution.getExecutionId() + ')');
}
if (project != null) {
buffer.a(" on project ");
buffer.project(project.getArtifactId());
}
if (cause != null) {
buffer.a(": ").failure(cause.getMessage());
}
return buffer.toString();
}
}
| LifecycleExecutionException |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/resultcache/QueryResultCacheTests.java | {
"start": 1721,
"end": 6917
} | class ____ {
private final LoggerContext context = (LoggerContext) LogManager.getContext( false );
private final Configuration configuration = context.getConfiguration();
private final LoggerConfig resultsLoggerConfig = configuration.getLoggerConfig( ResultsLogger.LOGGER_NAME );
private final LoggerConfig execLoggerConfig = configuration.getLoggerConfig( SqlExecLogger.LOGGER_NAME );
private final LoggerConfig cacheLoggerConfig = configuration.getLoggerConfig( SecondLevelCacheLogger.LOGGER_NAME );
Logger resultsLogger = LogManager.getLogger( ResultsLogger.LOGGER_NAME );
private Level originalResultsLevel;
private Level originalExecLevel;
private Level originalCacheLevel;
@BeforeAll
public void setUpLogger() {
originalResultsLevel = resultsLoggerConfig.getLevel();
resultsLoggerConfig.setLevel( Level.TRACE );
originalExecLevel = execLoggerConfig.getLevel();
execLoggerConfig.setLevel( Level.TRACE );
originalCacheLevel = cacheLoggerConfig.getLevel();
cacheLoggerConfig.setLevel( Level.TRACE );
}
@AfterAll
public void resetLogger() {
resultsLoggerConfig.setLevel( originalResultsLevel );
execLoggerConfig.setLevel( originalExecLevel );
cacheLoggerConfig.setLevel( originalCacheLevel );
}
@Test
public void testScalarCaching(SessionFactoryScope scope) {
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
assertThat( statistics.getPrepareStatementCount(), is( 0L ) );
final String hql = "select e.id, e.name from TestEntity e order by e.id";
scope.inTransaction(
session -> {
resultsLogger.debug( "First query ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
final List values = session.createQuery( hql )
.setCacheable( true )
.setCacheMode( CacheMode.NORMAL )
.setCacheRegion( "scalar-region" )
.list();
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
verifyScalarResults( values );
}
);
scope.inTransaction(
session -> {
resultsLogger.debug( "Second query ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
final List values = session.createQuery( hql )
.setCacheable( true )
.setCacheMode( CacheMode.NORMAL )
.setCacheRegion( "scalar-region" )
.list();
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
verifyScalarResults( values );
}
);
}
private void verifyScalarResults(List values) {
assertThat( values.size(), is( 2 ) );
final Object[] firstRow = (Object[]) values.get( 0 );
assertThat( firstRow[0], is( 1 ) );
assertThat( firstRow[1], is( "first" ) );
final Object[] secondRow = (Object[]) values.get( 1 );
assertThat( secondRow[0], is( 2 ) );
assertThat( secondRow[1], is( "second" ) );
}
@Test
public void testJoinFetchCaching(SessionFactoryScope scope) {
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
assertThat( statistics.getPrepareStatementCount(), is( 0L ) );
final String hql = "select e from AggregateEntity e join fetch e.value1 join fetch e.value2";
scope.inTransaction(
session -> {
resultsLogger.debug( "First query ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
final List<AggregateEntity> values = session.createQuery( hql, AggregateEntity.class )
.setCacheable( true )
.setCacheMode( CacheMode.NORMAL )
.setCacheRegion( "fetch-region" )
.list();
verifyFetchResults( values );
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
}
);
scope.inTransaction(
session -> {
resultsLogger.debug( "Second query ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" );
final List<AggregateEntity> values = session.createQuery( hql, AggregateEntity.class )
.setCacheable( true )
.setCacheMode( CacheMode.NORMAL )
.setCacheRegion( "fetch-region" )
.list();
verifyFetchResults( values );
assertThat( statistics.getPrepareStatementCount(), is( 1L ) );
}
);
}
private void verifyFetchResults(List<AggregateEntity> values) {
assertThat( values.size(), is( 1 ) );
final AggregateEntity rootEntity = values.get( 0 );
assertThat( rootEntity.getValue1(), notNullValue() );
assertTrue( Hibernate.isInitialized( rootEntity.getValue1() ) );
assertThat( rootEntity.getValue1().getId(), is( 1 ) );
assertThat( rootEntity.getValue1().getName(), is( "first" ) );
assertThat( rootEntity.getValue2(), notNullValue() );
assertTrue( Hibernate.isInitialized( rootEntity.getValue2() ) );
assertThat( rootEntity.getValue2().getId(), is( 2 ) );
assertThat( rootEntity.getValue2().getName(), is( "second" ) );
}
@BeforeEach
public void prepareTestData(SessionFactoryScope scope) {
scope.inTransaction(
session -> session.persist(
new AggregateEntity(
1,
"aggregate",
new TestEntity( 1, "first" ),
new TestEntity( 2, "second" )
)
)
);
}
@AfterEach
public void cleanupTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
| QueryResultCacheTests |
java | quarkusio__quarkus | independent-projects/tools/devtools-testing/src/test/java/io/quarkus/devtools/codestarts/quarkus/QuarkusCodestartGenerationTest.java | {
"start": 8846,
"end": 21973
} | class ____ extends BonjourResourceTest"));
}
@Test
void generateMavenDefaultJava(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.addData(getGenerationTestInputData())
.build();
final Path projectDir = testDirPath.resolve("maven-default-java");
getCatalog().createProject(input).generate(projectDir);
checkMaven(projectDir);
checkReadme(projectDir);
checkDockerfiles(projectDir, BuildTool.MAVEN);
checkConfigProperties(projectDir);
assertThatMatchSnapshot(testInfo, projectDir, "src/main/java/org/acme/GreetingResource.java");
assertThatMatchSnapshot(testInfo, projectDir, "src/test/java/org/acme/GreetingResourceTest.java");
assertThatMatchSnapshot(testInfo, projectDir, "src/test/java/org/acme/GreetingResourceIT.java");
}
@Test
void generateGradleDefaultJava(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.buildTool(BuildTool.GRADLE)
.addData(getGenerationTestInputData())
.build();
final Path projectDir = testDirPath.resolve("gradle-default-java");
getCatalog().createProject(input).generate(projectDir);
checkGradle(projectDir);
checkReadme(projectDir);
checkDockerfiles(projectDir, BuildTool.GRADLE);
checkConfigProperties(projectDir);
assertThatMatchSnapshot(testInfo, projectDir, "src/main/java/org/acme/GreetingResource.java");
assertThatMatchSnapshot(testInfo, projectDir, "src/test/java/org/acme/GreetingResourceTest.java");
assertThatMatchSnapshot(testInfo, projectDir, "src/native-test/java/org/acme/GreetingResourceIT.java");
}
@Test
void generateMavenResteasyJava(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.addExtension(ArtifactKey.fromString("io.quarkus:quarkus-resteasy"))
.addData(getGenerationTestInputData())
.build();
final Path projectDir = testDirPath.resolve("maven-resteasy-java");
getCatalog().createProject(input).generate(projectDir);
checkMaven(projectDir);
checkReadme(projectDir);
checkDockerfiles(projectDir, BuildTool.MAVEN);
checkConfigProperties(projectDir);
assertThatMatchSnapshot(testInfo, projectDir, "src/main/java/org/acme/GreetingResource.java");
assertThatMatchSnapshot(testInfo, projectDir, "src/test/java/org/acme/GreetingResourceTest.java");
assertThatMatchSnapshot(testInfo, projectDir, "src/test/java/org/acme/GreetingResourceIT.java");
}
@Test
void generateMavenConfigYamlJava(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.addExtension(ArtifactKey.fromString("io.quarkus:quarkus-config-yaml"))
.addData(getGenerationTestInputData())
.build();
final Path projectDir = testDirPath.resolve("maven-yaml-java");
getCatalog().createProject(input).generate(projectDir);
checkMaven(projectDir);
checkReadme(projectDir);
checkDockerfiles(projectDir, BuildTool.MAVEN);
checkConfigYaml(projectDir);
}
@Test
public void generateGradleWrapperGithubAction(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.buildTool(BuildTool.GRADLE)
.addData(getGenerationTestInputData())
.addCodestarts(Collections.singletonList("tooling-github-action"))
.build();
Path projectDir = testDirPath.resolve("gradle-github");
getCatalog().createProject(input).generate(projectDir);
checkGradle(projectDir);
assertThatMatchSnapshot(testInfo, projectDir, ".github/workflows/ci.yml")
.satisfies(
checkContains("cache: gradle"),
checkContains("run: ./gradlew build"));
}
@Test
public void generateMavenGithubAction(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.buildTool(BuildTool.MAVEN)
.addData(getGenerationTestInputData())
.addCodestarts(Collections.singletonList("tooling-github-action"))
.build();
Path projectDir = testDirPath.resolve("maven-github");
getCatalog().createProject(input).generate(projectDir);
assertThatMatchSnapshot(testInfo, projectDir, ".github/workflows/ci.yml")
.satisfies(checkContains("cache: maven"));
}
@Test
public void generateGradleNoWrapperGithubAction(TestInfo testInfo) throws Throwable {
final QuarkusCodestartProjectInput input = newInputBuilder()
.buildTool(BuildTool.GRADLE)
.noBuildToolWrapper()
.addData(getGenerationTestInputData())
.addCodestarts(Collections.singletonList("tooling-github-action"))
.build();
Path projectDir = testDirPath.resolve("gradle-nowrapper-github");
getCatalog().createProject(input).generate(projectDir);
checkGradle(projectDir);
assertThatMatchSnapshot(testInfo, projectDir, ".github/workflows/ci.yml")
.satisfies(
checkContains("uses: gradle/actions/setup-gradle"),
checkContains("cache: gradle"));
}
private void checkDockerfiles(Path projectDir, BuildTool buildTool) {
switch (buildTool) {
case MAVEN:
checkDockerfilesWithMaven(projectDir);
break;
case GRADLE_KOTLIN_DSL:
case GRADLE:
checkDockerfilesWithGradle(projectDir);
break;
default:
throw new IllegalArgumentException("Unhandled buildtool");
}
}
private void checkDockerfilesWithMaven(Path projectDir) {
assertThat(projectDir.resolve(".dockerignore")).exists();
assertThat(projectDir.resolve("src/main/docker/Dockerfile.jvm")).exists()
.satisfies(checkContains("./mvnw package"))
.satisfies(checkContains("docker build -f src/main/docker/Dockerfile.jvm"))
.satisfies(checkContains("registry.access.redhat.com/ubi8/openjdk-17:"))
.satisfies(checkContains("ENV JAVA_APP_JAR=\"/deployments/quarkus-run.jar\""))
.satisfies(checkContains("ENTRYPOINT [ \"/opt/jboss/container/java/run/run-java.sh\" ]"));
assertThat(projectDir.resolve("src/main/docker/Dockerfile.legacy-jar")).exists()
.satisfies(checkContains("./mvnw package -Dquarkus.package.jar.type=legacy-jar"))
.satisfies(checkContains("docker build -f src/main/docker/Dockerfile.legacy-jar"))
.satisfies(checkContains("registry.access.redhat.com/ubi8/openjdk-17:"))
.satisfies(checkContains("EXPOSE 8080"))
.satisfies(checkContains("USER 185"))
.satisfies(checkContains("ENV JAVA_APP_JAR=\"/deployments/quarkus-run.jar\""))
.satisfies(checkContains("ENTRYPOINT [ \"/opt/jboss/container/java/run/run-java.sh\" ]"));
assertThat(projectDir.resolve("src/main/docker/Dockerfile.native-micro")).exists()
.satisfies(checkContains("./mvnw package -Dnative"))
.satisfies(checkContains("quay.io/quarkus/quarkus-micro-image"))
.satisfies(checkContains("ENTRYPOINT [\"./application\", \"-Dquarkus.http.host=0.0.0.0\"]"));
assertThat(projectDir.resolve("src/main/docker/Dockerfile.native")).exists()
.satisfies(checkContains("./mvnw package -Dnative"))
.satisfies(checkContains("registry.access.redhat.com/ubi8/ubi-minimal"))
.satisfies(checkContains("ENTRYPOINT [\"./application\", \"-Dquarkus.http.host=0.0.0.0\"]"));
}
private void checkDockerfilesWithGradle(Path projectDir) {
assertThat(projectDir.resolve(".dockerignore")).exists();
assertThat(projectDir.resolve("src/main/docker/Dockerfile.jvm")).exists()
.satisfies(checkContains("./gradlew build"))
.satisfies(checkContains("docker build -f src/main/docker/Dockerfile.jvm"))
.satisfies(checkContains("registry.access.redhat.com/ubi8/openjdk-17:"))
.satisfies(checkContains("ENV JAVA_APP_JAR=\"/deployments/quarkus-run.jar\""))
.satisfies(checkContains("ENTRYPOINT [ \"/opt/jboss/container/java/run/run-java.sh\" ]"));
assertThat(projectDir.resolve("src/main/docker/Dockerfile.legacy-jar")).exists()
.satisfies(checkContains("./gradlew build -Dquarkus.package.jar.type=legacy-jar"))
.satisfies(checkContains("docker build -f src/main/docker/Dockerfile.legacy-jar"))
.satisfies(checkContains("registry.access.redhat.com/ubi8/openjdk-17:"))
.satisfies(checkContains("EXPOSE 8080"))
.satisfies(checkContains("USER 185"))
.satisfies(checkContains("ENV JAVA_APP_JAR=\"/deployments/quarkus-run.jar\""))
.satisfies(checkContains("ENTRYPOINT [ \"/opt/jboss/container/java/run/run-java.sh\" ]"));
assertThat(projectDir.resolve("src/main/docker/Dockerfile.native-micro")).exists()
.satisfies(checkContains("./gradlew build -Dquarkus.native.enabled=true"))
.satisfies(checkContains("quay.io/quarkus/quarkus-micro-image:2.0"))
.satisfies(checkContains("ENTRYPOINT [\"./application\", \"-Dquarkus.http.host=0.0.0.0\"]"));
assertThat(projectDir.resolve("src/main/docker/Dockerfile.native")).exists()
.satisfies(checkContains("./gradlew build -Dquarkus.native.enabled=true"))
.satisfies(checkContains("registry.access.redhat.com/ubi8/ubi-minimal"))
.satisfies(checkContains("ENTRYPOINT [\"./application\", \"-Dquarkus.http.host=0.0.0.0\"]"));
}
private void checkConfigProperties(Path projectDir) {
assertThat(projectDir.resolve("src/main/resources/application.yml")).doesNotExist();
assertThat(projectDir.resolve("src/main/resources/application.properties")).exists();
}
private void checkConfigYaml(Path projectDir) {
assertThat(projectDir.resolve("src/main/resources/application.yml")).exists();
assertThat(projectDir.resolve("src/main/resources/application.properties")).doesNotExist();
}
private void checkReadme(Path projectDir) {
assertThat(projectDir.resolve("README.md")).exists();
assertThat(projectDir.resolve(".gitignore")).exists();
}
private void checkMaven(Path projectDir) {
assertThat(projectDir.resolve("pom.xml"))
.exists()
.satisfies(checkContains("<groupId>org.test</groupId>"))
.satisfies(checkContains("<artifactId>test-codestart</artifactId>"))
.satisfies(checkContains("<version>1.0.0-codestart</version>"));
assertThat(projectDir.resolve("build.gradle")).doesNotExist();
assertThat(projectDir.resolve("gradle.properties")).doesNotExist();
assertThat(projectDir.resolve("settings.properties")).doesNotExist();
}
private void checkGradle(Path projectDir) {
assertThat(projectDir.resolve("pom.xml")).doesNotExist();
assertThat(projectDir.resolve("build.gradle.kts")).doesNotExist();
assertThat(projectDir.resolve("settings.gradle.kts")).doesNotExist();
assertThat(projectDir.resolve("build.gradle"))
.exists()
.satisfies(checkContains("group = 'org.test'"))
.satisfies(checkContains("version = '1.0.0-codestart'"));
assertThat(projectDir.resolve("gradle.properties")).exists();
assertThat(projectDir.resolve("settings.gradle"))
.exists()
.satisfies(checkContains("rootProject.name='test-codestart'"));
}
private void checkGradleWithKotlinDsl(Path projectDir) {
assertThat(projectDir.resolve("pom.xml")).doesNotExist();
assertThat(projectDir.resolve("build.gradle")).doesNotExist();
assertThat(projectDir.resolve("settings.gradle")).doesNotExist();
assertThat(projectDir.resolve("build.gradle.kts"))
.exists()
.satisfies(checkContains("group = \"org.test\""))
.satisfies(checkContains("version = \"1.0.0-codestart\""));
assertThat(projectDir.resolve("gradle.properties")).exists();
assertThat(projectDir.resolve("settings.gradle.kts"))
.exists()
.satisfies(checkContains("rootProject.name=\"test-codestart\""));
}
private QuarkusCodestartCatalog getCatalog() {
return FAKE_QUARKUS_CODESTART_CATALOG;
}
}
| BonjourResourceIT |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/aot/samples/basic/BasicSpringJupiterParameterizedClassTests.java | {
"start": 2440,
"end": 3024
} | class ____ {
@Test
void test(@Autowired ApplicationContext context, @Autowired MessageService messageService,
@Value("${test.engine}") String testEngine, @Value("${foo}") String foo) {
assertThat("foo".equals(parameterizedString) || "bar".equals(parameterizedString)).isTrue();
assertThat(messageService.generateMessage()).isEqualTo("¡Hola, AOT!");
assertThat(foo).isEqualTo("bar");
assertThat(testEngine).isEqualTo("jupiter");
BasicSpringJupiterTests.assertEnvProperties(context);
}
@Nested
@TestPropertySource(properties = "foo=quux")
public | NestedTests |
java | redisson__redisson | redisson/src/test/java/org/redisson/executor/RedissonScheduledExecutorServiceTest.java | {
"start": 16985,
"end": 23989
} | class ____ implements Runnable, Serializable {
@Override
public void run() {
try {
Thread.sleep(4000);
} catch (InterruptedException interruptedException) {
interruptedException.printStackTrace();
}
}
}
@Test
@Timeout(15)
public void testCancel2() throws InterruptedException {
RScheduledExecutorService e = redisson.getExecutorService("myExecutor");
e.registerWorkers(WorkerOptions.defaults());
String taskId = redisson.getExecutorService("myExecutor").schedule(new RunnableTask2(), 2000, TimeUnit.MILLISECONDS).getTaskId();
Thread.sleep(5500);
assertThat(e.cancelTask(taskId)).isFalse();
}
@Test
public void testCancel() throws InterruptedException, ExecutionException {
RScheduledExecutorService executor = redisson.getExecutorService("test");
ScheduledFuture<?> future1 = executor.schedule(new ScheduledRunnableTask("executed1"), 1, TimeUnit.SECONDS);
cancel(future1);
Thread.sleep(2000);
assertThat(redisson.getAtomicLong("executed1").isExists()).isFalse();
executor.delete();
redisson.getKeys().delete("executed1");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testShutdownWithCancelAndOfflineExecutor() throws InterruptedException, ExecutionException {
RScheduledExecutorService executor = redisson.getExecutorService("test2");
ScheduledFuture<?> future1 = executor.schedule(new ScheduledRunnableTask("executed1"), 1, TimeUnit.SECONDS);
cancel(future1);
Thread.sleep(2000);
assertThat(redisson.getAtomicLong("executed1").isExists()).isFalse();
executor.delete();
redisson.getKeys().delete("executed1");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testCancelAndInterrupt() throws InterruptedException, ExecutionException {
RScheduledExecutorService executor = redisson.getExecutorService("test");
ScheduledFuture<?> future = executor.schedule(new ScheduledLongRunnableTask("executed1"), 1, TimeUnit.SECONDS);
Thread.sleep(2000);
cancel(future);
assertThat(redisson.<Long>getBucket("executed1").get()).isBetween(1000L, Long.MAX_VALUE);
RScheduledFuture<?> futureAsync = executor.scheduleAsync(new ScheduledLongRunnableTask("executed2"), 1, TimeUnit.SECONDS);
Thread.sleep(2000);
assertThat(executor.cancelTask(futureAsync.getTaskId())).isTrue();
assertThat(redisson.<Long>getBucket("executed2").get()).isBetween(1000L, Long.MAX_VALUE);
executor.delete();
redisson.getKeys().delete("executed1", "executed2");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testCancelWithFixedDelay() throws InterruptedException, ExecutionException {
RScheduledExecutorService executor = redisson.getExecutorService("test");
ScheduledFuture<?> future1 = executor.scheduleWithFixedDelay(new ScheduledRunnableTask("executed1"), 1, 2, TimeUnit.SECONDS);
Thread.sleep(10000);
assertThat(redisson.getAtomicLong("executed1").get()).isEqualTo(5);
cancel(future1);
Thread.sleep(3000);
assertThat(redisson.getAtomicLong("executed1").get()).isEqualTo(5);
RScheduledFuture<?> futureAsync = executor.scheduleWithFixedDelayAsync(new ScheduledRunnableTask("executed2"), 1, 2, TimeUnit.SECONDS);
Thread.sleep(4000);
assertThat(redisson.getAtomicLong("executed2").get()).isEqualTo(2);
assertThat(executor.cancelTask(futureAsync.getTaskId())).isTrue();
Thread.sleep(3000);
assertThat(redisson.getAtomicLong("executed2").get()).isEqualTo(2);
executor.delete();
redisson.getKeys().delete("executed1", "executed2");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testCancelAndInterruptWithFixedDelay() throws InterruptedException, ExecutionException {
RScheduledExecutorService executor = redisson.getExecutorService("test");
ScheduledFuture<?> future1 = executor.scheduleWithFixedDelay(new ScheduledLongRepeatableTask("counter", "executed1"), 1, 2, TimeUnit.SECONDS);
Thread.sleep(6000);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(3);
cancel(future1);
Thread.sleep(50);
assertThat(redisson.<Long>getBucket("executed1").get()).isGreaterThan(1000L);
Thread.sleep(3000);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(3);
redisson.getAtomicLong("counter").delete();
RScheduledFuture<?> future2 = executor.scheduleWithFixedDelay(new ScheduledLongRepeatableTask("counter", "executed2"), 1, 2, TimeUnit.SECONDS);
Thread.sleep(6000);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(3);
assertThat(executor.cancelTask(future2.getTaskId())).isTrue();
assertThat(redisson.<Long>getBucket("executed2").get()).isGreaterThan(1000L);
Thread.sleep(3000);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(3);
executor.delete();
redisson.getKeys().delete("counter", "executed1", "executed2");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testCancelAndInterruptSwallowedWithFixedDelay() throws InterruptedException, ExecutionException {
RScheduledExecutorService executor = redisson.getExecutorService("test");
RScheduledFuture<?> future = executor.scheduleWithFixedDelay(new SwallowingInterruptionTask("execution1", "cancel1"), 0, 1, TimeUnit.SECONDS);
Thread.sleep(TimeUnit.SECONDS.toMillis(1));
assertThat(redisson.getAtomicLong("cancel1").get()).isZero();
assertThat(redisson.getAtomicLong("execution1").get()).isEqualTo(1);
cancel(future);
assertThat(redisson.getAtomicLong("cancel1").get()).isEqualTo(1);
assertThat(redisson.getAtomicLong("execution1").get()).isEqualTo(1);
Thread.sleep(TimeUnit.SECONDS.toMillis(6));
assertThat(executor.getTaskCount()).isZero();
assertThat(redisson.getAtomicLong("cancel1").get()).isEqualTo(1);
assertThat(redisson.getAtomicLong("execution1").get()).isEqualTo(1);
executor.delete();
redisson.getKeys().delete("execution1", "cancel1");
assertThat(redisson.getKeys().count()).isZero();
}
private void cancel(ScheduledFuture<?> future1) throws InterruptedException, ExecutionException {
assertThat(future1.cancel(true)).isTrue();
try {
future1.get();
Assertions.fail("CancellationException should arise");
} catch (CancellationException e) {
// skip
}
}
public static | RunnableTask2 |
java | apache__logging-log4j2 | log4j-osgi-test/src/test/java/org/apache/logging/log4j/osgi/tests/OsgiExt.java | {
"start": 1371,
"end": 2879
} | class ____ implements AfterEachCallback, BeforeEachCallback {
private final FrameworkFactory factory;
private Framework framework;
OsgiExt(final FrameworkFactory factory) {
this.factory = factory;
}
@Override
public void afterEach(ExtensionContext context) {
if (framework != null) {
try {
framework.stop();
} catch (final BundleException e) {
throw new RuntimeException(e);
} finally {
framework = null;
}
}
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
try (final InputStream is = OsgiExt.class.getResourceAsStream("/osgi.properties")) {
final Properties props = new Properties();
props.load(is);
final Map<String, String> configMap = props.entrySet().stream()
.collect(Collectors.toMap(
e -> String.valueOf(e.getKey()),
e -> String.valueOf(e.getValue()),
(prev, next) -> next,
HashMap::new));
framework = factory.newFramework(configMap);
framework.init();
framework.start();
}
}
public Framework getFramework() {
return framework;
}
@Override
public String toString() {
return "OsgiExt [factory=" + factory + ", framework=" + framework + "]";
}
}
| OsgiExt |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java | {
"start": 2002,
"end": 2329
} | class ____ extends MasterNodeRequest<InternalOrPrivateSettingsPlugin.UpdateInternalOrPrivateAction.Request> {
ValidRequest() {
super(TEST_REQUEST_TIMEOUT);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}
static | ValidRequest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RPermitExpirableSemaphoreRx.java | {
"start": 1235,
"end": 16286
} | interface ____ extends RExpirableRx {
/**
* Acquires a permit from this semaphore, blocking until one is
* available, or the thread is {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires a permit, if one is available and returns its id,
* reducing the number of available permits by one.
*
* <p>If no permit is available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of two things happens:
* <ul>
* <li>Some other thread invokes the {@link #release(String)} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread.
* </ul>
*
* @return permit id
*/
Single<String> acquire();
/**
* Acquires defined amount of <code>permits</code> from this semaphore, blocking until enough permits are
* available, or the thread is {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires <code>permits</code> permits, if they are available and returns their ids,
* reducing the number of available permits by <code>permits</code>.
*
* <p>If not enough permits are available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of two things happens:
* <ul>
* <li>Some other thread invokes the {@link #release(String)} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread.
* </ul>
*
* @param permits - the number of permits to acquire
* @return permits ids
*/
Single<List<String>> acquire(int permits);
/**
* Acquires a permit with defined lease time from this semaphore,
* blocking until one is available,
* or the thread is {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires a permit, if one is available and returns its id,
* reducing the number of available permits by one.
*
* <p>If no permit is available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of two things happens:
* <ul>
* <li>Some other thread invokes the {@link #release} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread.
* </ul>
*
* @param leaseTime - permit lease time
* @param unit - time unit
* @return permit id
*/
Single<String> acquire(long leaseTime, TimeUnit unit);
/**
* Acquires defined amount of <code>permits</code> with defined lease time from this semaphore,
* blocking until enough permits are available,
* or the thread is {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires <code>permits</code> permits, if they are available and returns their ids,
* reducing the number of available permits by <code>permits</code>.
*
* <p>If not enough permits are available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of two things happens:
* <ul>
* <li>Some other thread invokes the {@link #release} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread.
* </ul>
*
* @param permits - the number of permits to acquire
* @param leaseTime - permit lease time
* @param unit - time unit
* @return permits ids
*/
Single<List<String>> acquire(int permits, long leaseTime, TimeUnit unit);
/**
* Acquires a permit only if one is available at the
* time of invocation.
*
* <p>Acquires a permit, if one is available and returns immediately,
* with the permit id,
* reducing the number of available permits by one.
*
* <p>If no permit is available then this method will return
* immediately with the value {@code null}.
*
* @return permit id if a permit was acquired and {@code null}
* otherwise
*/
Maybe<String> tryAcquire();
/**
* Acquires defined amount of <code>permits</code> only if they are available at the
* time of invocation.
*
* <p>Acquires <code>permits</code> permits, if they are available and returns immediately,
* with the permits ids,
* reducing the number of available permits by <code>permits</code>.
*
* <p>If not enough permits are available then this method will return
* immediately with empty collection.
*
* @param permits - the number of permits to acquire
* @return permits ids if permit were acquired and empty collection
* otherwise
*/
Single<List<String>> tryAcquire(int permits);
/**
* Acquires a permit from this semaphore, if one becomes available
* within the given waiting time and the current thread has not
* been {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires a permit, if one is available and returns immediately,
* with the permit id,
* reducing the number of available permits by one.
*
* <p>If no permit is available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of three things happens:
* <ul>
* <li>Some other thread invokes the {@link #release(String)} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread; or
* <li>The specified waiting time elapses.
* </ul>
*
* <p>If a permit is acquired then the permit id is returned.
*
* <p>If the specified waiting time elapses then the value {@code null}
* is returned. If the time is less than or equal to zero, the method
* will not wait at all.
*
* @param waitTime the maximum time to wait for a permit
* @param unit the time unit of the {@code timeout} argument
* @return permit id if a permit was acquired and {@code null}
* if the waiting time elapsed before a permit was acquired
*/
Maybe<String> tryAcquire(long waitTime, TimeUnit unit);
/**
* Acquires a permit with defined lease time from this semaphore,
* if one becomes available
* within the given waiting time and the current thread has not
* been {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires a permit, if one is available and returns immediately,
* with the permit id,
* reducing the number of available permits by one.
*
* <p>If no permit is available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of three things happens:
* <ul>
* <li>Some other thread invokes the {@link #release(String)} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread; or
* <li>The specified waiting time elapses.
* </ul>
*
* <p>If a permit is acquired then the permit id is returned.
*
* <p>If the specified waiting time elapses then the value {@code null}
* is returned. If the time is less than or equal to zero, the method
* will not wait at all.
*
* @param waitTime the maximum time to wait for a permit
* @param leaseTime permit lease time
* @param unit the time unit of the {@code timeout} argument
* @return permit id if a permit was acquired and {@code null}
* if the waiting time elapsed before a permit was acquired
*/
Maybe<String> tryAcquire(long waitTime, long leaseTime, TimeUnit unit);
/**
* Acquires defined amount of <code>permits</code> with defined lease time from this semaphore,
* if enough permits become available
* within the given waiting time and the current thread has not
* been {@linkplain Thread#interrupt interrupted}.
*
* <p>Acquires <code>permits</code> permits, if they are available and returns immediately,
* with the permits ids,
* reducing the number of available permits by <code>permits</code>.
*
* <p>If not enough permits are available then the current thread becomes
* disabled for thread scheduling purposes and lies dormant until
* one of three things happens:
* <ul>
* <li>Some other thread invokes the {@link #release(String)} method for this
* semaphore and the current thread is next to be assigned a permit; or
* <li>Some other thread {@linkplain Thread#interrupt interrupts}
* the current thread; or
* <li>The specified waiting time elapses.
* </ul>
*
* <p>If permit are acquired then permits ids are returned.
*
* <p>If the specified waiting time elapses then the empty collection
* is returned. If the time is less than or equal to zero, the method
* will not wait at all.
*
* @param permits the number of permits to acquire
* @param waitTime the maximum time to wait for permits
* @param leaseTime permits lease time
* @param unit the time unit of the {@code timeout} argument
* @return permits ids if permit were acquired and empty collection
* if the waiting time elapsed before permits were acquired
*/
Single<List<String>> tryAcquire(int permits, long waitTime, long leaseTime, TimeUnit unit);
/**
* Releases a permit by its id, returning it to the semaphore.
*
* <p>Releases a permit, increasing the number of available permits by
* one. If any threads of Redisson client are trying to acquire a permit,
* then one is selected and given the permit that was just released.
*
* <p>There is no requirement that a thread that releases a permit must
* have acquired that permit by calling {@link #acquire()}.
* Correct usage of a semaphore is established by programming convention
* in the application.
*
* @param permitId - permit id
* @return {@code true} if a permit has been released and {@code false}
* otherwise
*/
Single<Boolean> tryRelease(String permitId);
/**
* Releases permits by their ids, returning them to the semaphore.
*
* <p>Releases <code>permits</code> permits, increasing the number of available permits
* by released amount. If any threads of Redisson client are trying to acquire a permit,
* then one is selected and given one of the permits that were just released.
*
* <p>There is no requirement that a thread that releases permits must
* have acquired that permit by calling {@link #acquire()}.
* Correct usage of a semaphore is established by programming convention
* in the application.
*
* @param permitsIds - permits ids
* @return amount of released permits
*/
Single<Integer> tryRelease(List<String> permitsIds);
/**
* Releases a permit by its id, returning it to the semaphore.
*
* <p>Releases a permit, increasing the number of available permits by
* one. If any threads of Redisson client are trying to acquire a permit,
* then one is selected and given the permit that was just released.
*
* <p>There is no requirement that a thread that releases a permit must
* have acquired that permit by calling {@link #acquire()}.
* Correct usage of a semaphore is established by programming convention
* in the application.
*
* <p>Throws an exception if permit id doesn't exist or has already been release
*
* @param permitId - permit id
* @return void
*/
Completable release(String permitId);
/**
* Releases permits by their ids, returning them to the semaphore.
*
* <p>Releases <code>permits</code> permits, increasing the number of available permits
* by released amount. If any threads of Redisson client are trying to acquire a permit,
* then one is selected and given the permit that were just released.
*
* <p>There is no requirement that a thread that releases permits must
* have acquired that permit by calling {@link #acquire()}.
* Correct usage of a semaphore is established by programming convention
* in the application.
*
* <p>Throws an exception if permit id doesn't exist or has already been release
*
* @param permitsIds - permits ids
* @return void
*/
Completable release(List<String> permitsIds);
/**
* Returns the current number of available permits.
*
* @return number of available permits
*/
Single<Integer> availablePermits();
/**
* Returns the number of permits.
*
* @return number of permits
*/
Single<Integer> getPermits();
/**
* Returns the number of acquired permits.
*
* @return number of acquired permits
*/
Single<Integer> acquiredPermits();
/**
* Sets number of permits.
*
* @param permits - number of permits
* @return <code>true</code> if permits has been set successfully, otherwise <code>false</code>.
*/
Single<Boolean> trySetPermits(int permits);
/**
* Sets the number of permits to the provided value.
* Calculates the <code>delta</code> between the given <code>permits</code> value and the
* current number of permits, then increases the number of available permits by <code>delta</code>.
*
* @param permits - number of permits
*/
Single<Void> setPermits(int permits);
/**
* Increases or decreases the number of available permits by defined value.
*
* @param permits - number of permits to add/remove
* @return void
*/
Completable addPermits(int permits);
/**
* Overrides and updates lease time for defined permit id.
*
* @param permitId - permit id
* @param leaseTime - permit lease time, use -1 to make it permanent
* @param unit - the time unit of the {@code timeout} argument
* @return <code>true</code> if permits has been updated successfully, otherwise <code>false</code>.
*/
Single<Boolean> updateLeaseTime(String permitId, long leaseTime, TimeUnit unit);
/**
* Returns lease time of the permitId
*
* @param permitId permit id
* @return lease time in millis or -1 if no lease time specified
* @throws IllegalArgumentException if permit id doesn't exist or has already been released.
*/
Single<Long> getLeaseTime(String permitId);
}
| RPermitExpirableSemaphoreRx |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractor.java | {
"start": 609,
"end": 1648
} | class ____ implements HitExtractor {
public static final HitExtractor INSTANCE = new ScoreExtractor();
/**
* Stands for {@code score}. We try to use short names for {@link HitExtractor}s
* to save a few bytes when when we send them back to the user.
*/
static final String NAME = "sc";
private ScoreExtractor() {}
@Override
public void writeTo(StreamOutput out) throws IOException {
// Nothing to write
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object extract(SearchHit hit) {
return hit.getScore();
}
@Override
public String hitName() {
return null;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
return true;
}
@Override
public int hashCode() {
return 31;
}
@Override
public String toString() {
return "SCORE";
}
}
| ScoreExtractor |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java | {
"start": 15521,
"end": 15890
} | class ____ extends BlockFactory {
ArrayBlockFactory(String keyToBufferDir, Configuration conf) {
super(keyToBufferDir, conf);
}
@Override
public DataBlock create(long index, int limit,
BlockUploadStatistics statistics)
throws IOException {
return new ByteArrayBlock(0, limit, statistics);
}
}
static | ArrayBlockFactory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/multibindings/MapBinder.java | {
"start": 9599,
"end": 12807
} | class ____<K, V> implements ProviderWithDependencies<Map<K, Provider<V>>> {
private Map<K, Provider<V>> providerMap;
@SuppressWarnings("rawtypes") // code is silly stupid with generics
private final RealMapBinder binder;
private final Provider<Set<Entry<K, Provider<V>>>> provider;
@SuppressWarnings("rawtypes") // code is silly stupid with generics
MapBinderProviderWithDependencies(RealMapBinder binder, Provider<Set<Entry<K, Provider<V>>>> provider) {
this.binder = binder;
this.provider = provider;
}
@SuppressWarnings({ "unchecked", "unused" }) // code is silly stupid with generics
@Inject
public void initialize() {
binder.binder = null;
Map<K, Provider<V>> providerMapMutable = new LinkedHashMap<>();
for (Entry<K, Provider<V>> entry : provider.get()) {
Multibinder.checkConfiguration(
providerMapMutable.put(entry.getKey(), entry.getValue()) == null,
"Map injection failed due to duplicated key \"%s\"",
entry.getKey()
);
}
providerMap = Collections.unmodifiableMap(providerMapMutable);
}
@Override
public Map<K, Provider<V>> get() {
return providerMap;
}
}
@Override
@SuppressWarnings({ "rawtypes", "unchecked" }) // code is silly stupid with generics
public void configure(Binder binder) {
Multibinder.checkConfiguration(isInitialized() == false, "MapBinder was already initialized");
// binds a Map<K, Provider<V>> from a collection of Map<Entry<K, Provider<V>>
final Provider<Set<Entry<K, Provider<V>>>> entrySetProvider = binder.getProvider(entrySetBinder.getSetKey());
binder.bind(providerMapKey).toProvider(new MapBinderProviderWithDependencies(RealMapBinder.this, entrySetProvider));
final Provider<Map<K, Provider<V>>> mapProvider = binder.getProvider(providerMapKey);
binder.bind(mapKey).toProvider((ProviderWithDependencies<Map<K, V>>) () -> {
Map<K, V> map = new LinkedHashMap<>();
for (Entry<K, Provider<V>> entry : mapProvider.get().entrySet()) {
V value = entry.getValue().get();
K key = entry.getKey();
Multibinder.checkConfiguration(value != null, "Map injection failed due to null value for key \"%s\"", key);
map.put(key, value);
}
return Collections.unmodifiableMap(map);
});
}
private boolean isInitialized() {
return binder == null;
}
@Override
public boolean equals(Object o) {
return o instanceof RealMapBinder && ((RealMapBinder<?, ?>) o).mapKey.equals(mapKey);
}
@Override
public int hashCode() {
return mapKey.hashCode();
}
private static final | MapBinderProviderWithDependencies |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/ReferenceConfigTest.java | {
"start": 6558,
"end": 12585
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(ReferenceConfigTest.class);
private static String zkUrl1;
private static String zkUrl2;
private static String registryUrl1;
@BeforeAll
public static void beforeAll() {
int zkServerPort1 = 2181;
int zkServerPort2 = 2182;
zkUrl1 = "zookeeper://localhost:" + zkServerPort1;
zkUrl2 = "zookeeper://localhost:" + zkServerPort2;
registryUrl1 = "registry://localhost:" + zkServerPort1 + "?registry=zookeeper";
}
@BeforeEach
public void setUp() throws Exception {
DubboBootstrap.reset();
FrameworkModel.destroyAll();
SysProps.clear();
SysProps.setProperty("dubbo.metrics.enabled", "false");
SysProps.setProperty("dubbo.metrics.protocol", "disabled");
ApplicationModel.defaultModel().getApplicationConfigManager();
DubboBootstrap.getInstance();
}
@AfterEach
public void tearDown() throws IOException {
DubboBootstrap.reset();
FrameworkModel.destroyAll();
SysProps.clear();
Mockito.framework().clearInlineMocks();
}
/**
* Test whether the configuration required for the aggregation service reference meets expectations
*/
@Test
void testAppendConfig() {
ApplicationConfig applicationConfig = new ApplicationConfig();
applicationConfig.setName("application1");
applicationConfig.setVersion("v1");
applicationConfig.setOwner("owner1");
applicationConfig.setOrganization("bu1");
applicationConfig.setArchitecture("architecture1");
applicationConfig.setEnvironment("test");
applicationConfig.setCompiler("javassist");
applicationConfig.setLogger("log4j2");
applicationConfig.setDumpDirectory("/");
applicationConfig.setQosEnable(false);
applicationConfig.setQosHost("127.0.0.1");
applicationConfig.setQosPort(77777);
applicationConfig.setQosAcceptForeignIp(false);
Map<String, String> parameters = new HashMap<>();
parameters.put("key1", "value1");
parameters.put("key2", "value2");
applicationConfig.setParameters(parameters);
applicationConfig.setShutwait("5");
applicationConfig.setMetadataType("local");
applicationConfig.setRegisterConsumer(false);
applicationConfig.setRepository("repository1");
applicationConfig.setEnableFileCache(false);
applicationConfig.setProtocol("dubbo");
applicationConfig.setMetadataServicePort(88888);
applicationConfig.setMetadataServiceProtocol("tri");
applicationConfig.setLivenessProbe("livenessProbe");
applicationConfig.setReadinessProbe("readinessProb");
applicationConfig.setStartupProbe("startupProbe");
ReferenceConfig<DemoService> referenceConfig = new ReferenceConfig<>();
referenceConfig.setClient("netty");
referenceConfig.setGeneric(Boolean.FALSE.toString());
referenceConfig.setProtocol("dubbo");
referenceConfig.setInit(true);
referenceConfig.setLazy(false);
referenceConfig.setInjvm(false);
referenceConfig.setReconnect("reconnect");
referenceConfig.setSticky(false);
referenceConfig.setStub(DEFAULT_STUB_EVENT);
referenceConfig.setRouter("default");
referenceConfig.setReferAsync(true);
MonitorConfig monitorConfig = new MonitorConfig();
applicationConfig.setMonitor(monitorConfig);
ModuleConfig moduleConfig = new ModuleConfig();
moduleConfig.setMonitor("default");
moduleConfig.setName("module1");
moduleConfig.setOrganization("application1");
moduleConfig.setVersion("v1");
moduleConfig.setOwner("owner1");
ConsumerConfig consumerConfig = new ConsumerConfig();
consumerConfig.setClient("netty");
consumerConfig.setThreadpool("fixed");
consumerConfig.setCorethreads(200);
consumerConfig.setQueues(500);
consumerConfig.setThreads(300);
consumerConfig.setShareconnections(10);
consumerConfig.setUrlMergeProcessor("default");
consumerConfig.setReferThreadNum(20);
consumerConfig.setReferBackground(false);
referenceConfig.setConsumer(consumerConfig);
MethodConfig methodConfig = new MethodConfig();
methodConfig.setName("sayName");
methodConfig.setStat(1);
methodConfig.setRetries(0);
methodConfig.setExecutes(10);
methodConfig.setDeprecated(false);
methodConfig.setSticky(false);
methodConfig.setReturn(false);
methodConfig.setService("service");
methodConfig.setServiceId(DemoService.class.getName());
methodConfig.setParentPrefix("demo");
referenceConfig.setMethods(Collections.singletonList(methodConfig));
referenceConfig.setInterface(DemoService.class);
referenceConfig.getInterfaceClass();
referenceConfig.setCheck(false);
RegistryConfig registry = new RegistryConfig();
registry.setAddress(zkUrl1);
applicationConfig.setRegistries(Collections.singletonList(registry));
applicationConfig.setRegistryIds(registry.getId());
moduleConfig.setRegistries(Collections.singletonList(registry));
referenceConfig.setRegistry(registry);
DubboBootstrap dubboBootstrap = DubboBootstrap.newInstance(FrameworkModel.defaultModel());
dubboBootstrap
.application(applicationConfig)
.reference(referenceConfig)
.registry(registry)
.module(moduleConfig)
.initialize();
referenceConfig.init();
ServiceMetadata serviceMetadata = referenceConfig.getServiceMetadata();
// verify additional side parameter
Assertions.assertEquals(CONSUMER_SIDE, serviceMetadata.getAttachments().get(SIDE_KEY));
// verify additional | ReferenceConfigTest |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/internal/aether/ReverseTreeRepositoryListenerTest.java | {
"start": 1528,
"end": 4194
} | class ____ {
@Test
void isLocalRepositoryArtifactTest() {
File baseDir = new File("local/repository");
LocalRepository localRepository = new LocalRepository(baseDir);
RepositorySystemSession session = mock(RepositorySystemSession.class);
when(session.getLocalRepository()).thenReturn(localRepository);
Artifact localRepositoryArtifact = mock(Artifact.class);
when(localRepositoryArtifact.getFile()).thenReturn(new File(baseDir, "some/path/within"));
Artifact nonLocalReposioryArtifact = mock(Artifact.class);
when(nonLocalReposioryArtifact.getFile()).thenReturn(new File("something/completely/different"));
assertTrue(ReverseTreeRepositoryListener.isLocalRepositoryArtifactOrMissing(session, localRepositoryArtifact));
assertFalse(
ReverseTreeRepositoryListener.isLocalRepositoryArtifactOrMissing(session, nonLocalReposioryArtifact));
}
@Test
void isMissingArtifactTest() {
File baseDir = new File("local/repository");
LocalRepository localRepository = new LocalRepository(baseDir);
RepositorySystemSession session = mock(RepositorySystemSession.class);
when(session.getLocalRepository()).thenReturn(localRepository);
Artifact localRepositoryArtifact = mock(Artifact.class);
when(localRepositoryArtifact.getFile()).thenReturn(null);
assertTrue(ReverseTreeRepositoryListener.isLocalRepositoryArtifactOrMissing(session, localRepositoryArtifact));
}
@Test
void lookupCollectStepDataTest() {
RequestTrace doesNotHaveIt =
RequestTrace.newChild(null, "foo").newChild("bar").newChild("baz");
assertNull(ReverseTreeRepositoryListener.lookupCollectStepData(doesNotHaveIt));
final CollectStepData data = mock(CollectStepData.class);
RequestTrace haveItFirst = RequestTrace.newChild(null, data)
.newChild("foo")
.newChild("bar")
.newChild("baz");
assertSame(data, ReverseTreeRepositoryListener.lookupCollectStepData(haveItFirst));
RequestTrace haveItLast = RequestTrace.newChild(null, "foo")
.newChild("bar")
.newChild("baz")
.newChild(data);
assertSame(data, ReverseTreeRepositoryListener.lookupCollectStepData(haveItLast));
RequestTrace haveIt = RequestTrace.newChild(null, "foo")
.newChild("bar")
.newChild(data)
.newChild("baz");
assertSame(data, ReverseTreeRepositoryListener.lookupCollectStepData(haveIt));
}
}
| ReverseTreeRepositoryListenerTest |
java | google__auto | value/src/test/java/com/google/auto/value/extension/toprettystring/ToPrettyStringTest.java | {
"start": 19854,
"end": 21620
} | class ____ {
@Nullable
abstract com.google.common.base.Optional<Object> optional();
@ToPrettyString
abstract String toPrettyString();
}
@Test
public void guavaOptional_present() {
GuavaOptional valueType =
new AutoValue_ToPrettyStringTest_GuavaOptional(
com.google.common.base.Optional.of("hello, world"));
assertThat(valueType.toPrettyString())
.isEqualTo(
"GuavaOptional {" // force newline
+ "\n optional = hello, world,"
+ "\n}");
}
@Test
public void guavaOptional_absent() {
GuavaOptional valueType =
new AutoValue_ToPrettyStringTest_GuavaOptional(com.google.common.base.Optional.absent());
assertThat(valueType.toPrettyString())
.isEqualTo(
"GuavaOptional {" // force newline
+ "\n optional = <absent>,"
+ "\n}");
}
@Test
public void guavaOptional_valueWithNewlines() {
GuavaOptional valueType =
new AutoValue_ToPrettyStringTest_GuavaOptional(
com.google.common.base.Optional.of("optional\nwith\nnewline"));
assertThat(valueType.toPrettyString())
.isEqualTo(
"GuavaOptional {" // force newline
+ "\n optional = optional"
+ "\n with"
+ "\n newline,"
+ "\n}");
}
@Test
public void guavaOptional_null() {
@SuppressWarnings("NullOptional")
GuavaOptional valueType = new AutoValue_ToPrettyStringTest_GuavaOptional(null);
assertThat(valueType.toPrettyString())
.isEqualTo(
"GuavaOptional {" // force newline
+ "\n optional = null,"
+ "\n}");
}
@AutoValue
abstract static | GuavaOptional |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/configuration/injection/MockInjectionStrategy.java | {
"start": 284,
"end": 3053
} | class ____ {
/**
* NOP Strategy that will always try the next strategy.
*/
public static MockInjectionStrategy nop() {
return new MockInjectionStrategy() {
@Override
protected boolean processInjection(
Field field, Object fieldOwner, Set<Object> mockCandidates) {
return false;
}
};
}
private MockInjectionStrategy nextStrategy;
/**
* Enqueue next injection strategy.
*
* <p>
* The implementation should take care of the actual calling if required.
* </p>
*
* @param strategy Queued strategy.
* @return The passed strategy instance to allow chaining.
*/
public MockInjectionStrategy thenTry(MockInjectionStrategy strategy) {
if (nextStrategy != null) {
nextStrategy.thenTry(strategy);
} else {
nextStrategy = strategy;
}
return strategy;
}
/**
* Actually inject mockCandidates on field.
*
* <p>
* Actual algorithm is defined in the implementations of {@link #processInjection(Field, Object, Set)}.
* However if injection occurred successfully, the process should return <code>true</code>,
* and <code>false</code> otherwise.
* </p>
*
* <p>
* The code takes care of calling the next strategy if available and if of course if required
* </p>
*
* @param onField Field needing injection.
* @param fieldOwnedBy The owning instance of the field.
* @param mockCandidates A set of mock candidate, that might be injected.
* @return <code>true</code> if successful, <code>false</code> otherwise.
*/
public boolean process(Field onField, Object fieldOwnedBy, Set<Object> mockCandidates) {
if (processInjection(onField, fieldOwnedBy, mockCandidates)) {
return true;
}
return relayProcessToNextStrategy(onField, fieldOwnedBy, mockCandidates);
}
/**
* Process actual injection.
*
* <p>
* Don't call this method directly, instead call {@link #process(Field, Object, Set)}
* </p>
*
* @param field Field needing injection
* @param fieldOwner Field owner instance.
* @param mockCandidates Pool of mocks to inject.
* @return <code>true</code> if injection occurred, <code>false</code> otherwise
*/
protected abstract boolean processInjection(
Field field, Object fieldOwner, Set<Object> mockCandidates);
private boolean relayProcessToNextStrategy(
Field field, Object fieldOwner, Set<Object> mockCandidates) {
return nextStrategy != null && nextStrategy.process(field, fieldOwner, mockCandidates);
}
}
| MockInjectionStrategy |
java | grpc__grpc-java | cronet/src/main/java/io/grpc/cronet/CronetClientStream.java | {
"start": 6468,
"end": 8977
} | class ____ implements AbstractClientStream.Sink {
@Override
public void writeHeaders(Metadata metadata, byte[] payload) {
startCallback.run();
// streamFactory will be set by startCallback, unless the transport is in go-away state
if (streamFactory == null) {
return;
}
BidirectionalStreamCallback callback = new BidirectionalStreamCallback();
String path = url;
if (payload != null) {
path += "?" + BaseEncoding.base64().encode(payload);
}
BidirectionalStream.Builder builder =
streamFactory.newBidirectionalStreamBuilder(path, callback, executor);
if (payload != null) {
builder.setHttpMethod("GET");
} else if (idempotent) {
builder.setHttpMethod("PUT");
}
if (delayRequestHeader) {
builder.delayRequestHeadersUntilFirstFlush(true);
}
if (annotation != null || annotations != null) {
if (annotation != null) {
builder.addRequestAnnotation(annotation);
}
if (annotations != null) {
for (Object o : annotations) {
builder.addRequestAnnotation(o);
}
}
}
setGrpcHeaders(builder);
stream = builder.build();
stream.start();
}
@Override
public void writeFrame(
WritableBuffer buffer, boolean endOfStream, boolean flush, int numMessages) {
synchronized (state.lock) {
if (state.cancelSent) {
return;
}
ByteBuffer byteBuffer;
if (buffer != null) {
byteBuffer = ((CronetWritableBuffer) buffer).buffer();
((Buffer) byteBuffer).flip();
} else {
byteBuffer = EMPTY_BUFFER;
}
onSendingBytes(byteBuffer.remaining());
if (!state.streamReady) {
state.enqueuePendingData(new PendingData(byteBuffer, endOfStream, flush));
} else {
streamWrite(byteBuffer, endOfStream, flush);
}
}
}
@Override
public void cancel(Status reason) {
synchronized (state.lock) {
if (state.cancelSent) {
return;
}
state.cancelSent = true;
state.cancelReason = reason;
state.clearPendingData();
if (stream != null) {
// Will report stream finish when BidirectionalStreamCallback.onCanceled is called.
stream.cancel();
} else {
transport.finishStream(CronetClientStream.this, reason);
}
}
}
}
| Sink |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/notify/NotifyCenter.java | {
"start": 11056,
"end": 11901
} | class ____ type of the event type.
* @param event event instance.
*/
private static boolean publishEvent(final Class<? extends Event> eventType, final Event event) {
if (ClassUtils.isAssignableFrom(SlowEvent.class, eventType)) {
return INSTANCE.sharePublisher.publish(event);
}
final String topic = ClassUtils.getCanonicalName(eventType);
EventPublisher publisher = INSTANCE.publisherMap.get(topic);
if (publisher != null) {
return publisher.publish(event);
}
if (event.isPluginEvent()) {
return true;
}
LOGGER.warn("There are no [{}] publishers for this event, please register", topic);
return false;
}
/**
* Register to share-publisher.
*
* @param eventType | Instances |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/impl/VertxImpl.java | {
"start": 38326,
"end": 44886
} | class ____ implements Handler<Void>, Closeable, Runnable {
// Callback invoked on each timer fire; receives the timer id.
private final Handler<Long> handler;
// True for periodic timers (re-fire), false for one-shot timers.
private final boolean periodic;
// The timer id, also the key in the owning timeouts map.
private final long id;
// Context on which the user handler is dispatched.
private final ContextInternal context;
// Flips once when the timer is cancelled or (for one-shot timers) has fired.
private final AtomicBoolean disposed = new AtomicBoolean();
// Underlying scheduled task; volatile because it is assigned after scheduling
// and read by cancellation from other threads.
private volatile java.util.concurrent.Future<?> future;
InternalTimerHandler(long id, Handler<Long> runnable, boolean periodic, ContextInternal context) {
this.context = context;
this.id = id;
this.handler = runnable;
this.periodic = periodic;
}
// Invoked by the scheduler when the timer fires; emits this handler on the context.
@Override
public void run() {
ContextInternal dispatcher = context;
if (periodic) {
// Each periodic firing emits on a fresh duplicate — presumably so every tick
// gets its own context-local data; NOTE(review): confirm against Context docs.
dispatcher = dispatcher.duplicate();
}
dispatcher.emit(this);
}
// Runs on the target context: actually invokes the user handler.
public void handle(Void v) {
if (periodic) {
// Periodic timers keep firing until cancelled; skip if disposal raced in.
if (!disposed.get()) {
handler.handle(id);
}
} else if (disposed.compareAndSet(false, true)) {
// One-shot: the CAS makes firing and cancellation mutually exclusive.
timeouts.remove(id);
try {
handler.handle(id);
} finally {
// Clean up after it's fired
context.removeCloseHook(this);
}
}
}
private boolean cancel() {
  // Only the call that actually disposes the timer unregisters the close hook.
  if (!tryCancel()) {
    return false;
  }
  if (context.isDeployment()) {
    context.removeCloseHook(this);
  }
  return true;
}
private boolean tryCancel() {
  // The first caller wins the CAS and performs the cleanup; later calls are no-ops.
  boolean won = disposed.compareAndSet(false, true);
  if (won) {
    timeouts.remove(id);
    future.cancel(false);
  }
  return won;
}
// Called via Context close hook when Verticle is undeployed
public void close(Completable<Void> completion) {
// Best-effort cancellation; the hook itself is being torn down with the context.
tryCancel();
completion.succeed();
}
}
@Override
public WorkerExecutorImpl createSharedWorkerExecutor(String name) {
// Convenience overload: uses the vertx-wide default worker pool size.
return createSharedWorkerExecutor(name, defaultWorkerPoolSize);
}
@Override
public WorkerExecutorImpl createSharedWorkerExecutor(String name, int poolSize) {
// Convenience overload: uses the default max worker execute time.
return createSharedWorkerExecutor(name, poolSize, maxWorkerExecTime);
}
@Override
public synchronized WorkerExecutorImpl createSharedWorkerExecutor(String name, int poolSize, long maxExecuteTime) {
// Convenience overload: uses the default max execute time unit.
return createSharedWorkerExecutor(name, poolSize, maxExecuteTime, maxWorkerExecTimeUnit);
}
@Override
public synchronized WorkerExecutorImpl createSharedWorkerExecutor(String name, int poolSize, long maxExecuteTime, TimeUnit maxExecuteTimeUnit) {
  // Each executor gets its own close future so closing it only tears down this handle.
  CloseFuture executorCloseFuture = new CloseFuture();
  WorkerPool pool = createSharedWorkerPool(executorCloseFuture, name, poolSize, maxExecuteTime, maxExecuteTimeUnit);
  // Tie the executor's lifecycle to the caller's context close future (or the vertx-wide one).
  resolveCloseFuture().add(executorCloseFuture);
  return new WorkerExecutorImpl(this, cleaner, pool);
}
public WorkerPool createSharedWorkerPool(String name, int poolSize, long maxExecuteTime, TimeUnit maxExecuteTimeUnit) {
// Pool with a standalone close future, not tied to any deployment.
return createSharedWorkerPool(new CloseFuture(), name, poolSize, maxExecuteTime, maxExecuteTimeUnit);
}
/**
 * Looks up or creates the shared worker pool registered under {@code name}.
 * The returned wrapper shares the underlying executor and metrics, but its
 * {@code close()} only closes the caller's {@code closeFuture}; the real pool
 * is shut down via the shared-resource close hook.
 *
 * @throws IllegalArgumentException if {@code poolSize} or {@code maxExecuteTime} is not positive
 */
private synchronized WorkerPool createSharedWorkerPool(CloseFuture closeFuture, String name, int poolSize, long maxExecuteTime, TimeUnit maxExecuteTimeUnit) {
if (poolSize < 1) {
throw new IllegalArgumentException("poolSize must be > 0");
}
if (maxExecuteTime < 1) {
throw new IllegalArgumentException("maxExecuteTime must be > 0");
}
// The factory lambda runs only for the first user of this pool name.
WorkerPool shared = createSharedResource("__vertx.shared.workerPools", name, closeFuture, cf -> {
ThreadFactory workerThreadFactory = createThreadFactory(threadFactory, checker, useDaemonThread, maxExecuteTime, maxExecuteTimeUnit, name + "-", true);
ExecutorService workerExec = executorServiceFactory.createExecutor(workerThreadFactory, poolSize, poolSize);
PoolMetrics workerMetrics = metrics != null ? metrics.createPoolMetrics("worker", name, poolSize) : null;
WorkerPool pool = new WorkerPool(workerExec, workerMetrics);
// Shut the real executor down when the shared resource itself is closed.
cf.add(completion -> {
pool.close();
completion.succeed();
});
return pool;
});
// Per-user facade: close() releases this user's handle, not the shared executor.
return new WorkerPool(shared.executor(), shared.metrics()) {
@Override
public void close() {
closeFuture.close();
}
};
}
@Override
public WorkerPool wrapWorkerPool(ExecutorService executor) {
  // Adapts a caller-supplied executor; the pool size is unknown, hence -1 for metrics.
  PoolMetrics poolMetrics = null;
  if (metrics != null) {
    poolMetrics = metrics.createPoolMetrics("worker", null, -1);
  }
  return new WorkerPool(executor, poolMetrics);
}
/**
 * Builds a thread factory producing {@code VertxThread}s named {@code prefix + counter},
 * owned by this vertx instance and registered with the blocked-thread checker.
 */
private ThreadFactory createThreadFactory(VertxThreadFactory threadFactory, BlockedThreadChecker checker, Boolean useDaemonThread, long maxExecuteTime, TimeUnit maxExecuteTimeUnit, String prefix, boolean worker) {
AtomicInteger threadCount = new AtomicInteger(0);
return runnable -> {
VertxThread thread = threadFactory.newVertxThread(runnable, prefix + threadCount.getAndIncrement(), worker, maxExecuteTime, maxExecuteTimeUnit);
thread.owner = VertxImpl.this;
checker.registerThread(thread, thread.info);
// Tri-state flag: null means keep whatever daemon status the factory produced.
if (useDaemonThread != null && thread.isDaemon() != useDaemonThread) {
thread.setDaemon(useDaemonThread);
}
return thread;
};
}
@Override
public Vertx exceptionHandler(Handler<Throwable> handler) {
// Fluent setter for the instance-wide exception handler.
exceptionHandler = handler;
return this;
}
@Override
public Handler<Throwable> exceptionHandler() {
// Returns the instance-wide exception handler set via exceptionHandler(Handler).
return exceptionHandler;
}
@Override
public CloseFuture closeFuture() {
// The vertx-wide close future.
return closeFuture;
}
@Override
public VertxTracer tracer() {
// The tracer configured for this instance.
return tracer;
}
@Override
public void addCloseHook(Closeable hook) {
// The hook is invoked when the vertx-wide close future runs.
closeFuture.add(hook);
}
@Override
public void removeCloseHook(Closeable hook) {
// Deregisters a hook previously added via addCloseHook.
closeFuture.remove(hook);
}
@Override
public boolean isVirtualThreadAvailable() {
// Virtual-thread support is signalled by a non-null virtual thread executor.
return virtualThreadExecutor != null;
}
private CloseFuture resolveCloseFuture() {
  // Prefer the calling context's close future; fall back to the vertx-wide one.
  ContextInternal callingContext = getContext();
  if (callingContext == null) {
    return closeFuture;
  }
  return callingContext.closeFuture();
}
/**
 * Execute the {@code task} disabling the thread-local association for the duration
 * of the execution: {@link Vertx#currentContext()} will return {@code null} inside it.
 * On a non-Vertx thread there is no association to disable, so the task simply runs.
 *
 * @param task the task to execute
 */
void executeIsolated(Handler<Void> task) {
if (Thread.currentThread() instanceof VertxThread) {
ContextInternal prev = beginDispatch(null);
try {
task.handle(null);
} finally {
// Always restore the previous context association, even if the task throws.
endDispatch(prev);
}
} else {
task.handle(null);
}
}
static | InternalTimerHandler |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.