language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_containsSubsequence_with_Character_array_Test.java | {
"start": 1201,
"end": 1972
} | class ____ extends CharArrayAssertBaseTest {
@Test
void should_fail_if_values_is_null() {
// GIVEN
Character[] subsequence = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertions.containsSubsequence(subsequence));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("subsequence").create());
}
@Override
protected CharArrayAssert invoke_api_method() {
return assertions.containsSubsequence(new Character[] { 'a', 'b', 'c' });
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContainsSubsequence(getInfo(assertions), getActual(assertions), arrayOf('a', 'b', 'c'));
}
}
| CharArrayAssert_containsSubsequence_with_Character_array_Test |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/index/PersistentClassIndex.java | {
"start": 211,
"end": 445
} | class ____ {
final Map<DotName, Optional<ClassInfo>> additionalClasses = new ConcurrentHashMap<>();
public Map<DotName, Optional<ClassInfo>> getAdditionalClasses() {
return additionalClasses;
}
}
| PersistentClassIndex |
java | quarkusio__quarkus | integration-tests/reactive-messaging-context-propagation/src/main/java/io/quarkus/it/kafka/contextual/FlowersContextualReceivers.java | {
"start": 575,
"end": 2441
} | class ____ {
@Inject
RequestBean reqBean;
@Inject
Logger logger;
@Incoming("contextual-flower")
void processContextual(String id) {
Context ctx = Vertx.currentContext();
Log.info(ctx + "[" + ctx.getClass() + "]");
Log.infof("bean: %s, id: %s", reqBean, reqBean.getId());
logger.infof("Hello, %s", reqBean.getName());
assert Objects.equals(id, reqBean.getId());
}
@Blocking
@Incoming("contextual-flower-blocking")
void processContextualBlocking(String id) {
Context ctx = Vertx.currentContext();
assert Context.isOnWorkerThread();
Log.info(ctx + "[" + ctx.getClass() + "]");
Log.infof("bean: %s, id: %s", reqBean, reqBean.getId());
logger.infof("Hello, %s", reqBean.getName());
assert Objects.equals(id, reqBean.getId());
}
@Blocking("named-pool")
@Incoming("contextual-flower-blocking-named")
void processContextualBlockingNamed(String id) {
Context ctx = Vertx.currentContext();
assert Context.isOnWorkerThread();
Log.info(ctx + "[" + ctx.getClass() + "]");
Log.infof("bean: %s, id: %s", reqBean, reqBean.getId());
logger.infof("Hello, %s", reqBean.getName());
assert Objects.equals(id, reqBean.getId());
}
@RunOnVirtualThread
@Incoming("contextual-flower-virtual-thread")
void processContextualVT(String id) {
Context ctx = Vertx.currentContext();
VirtualThreadsAssertions.assertThatItRunsOnVirtualThread();
VirtualThreadsAssertions.assertThatItRunsOnADuplicatedContext();
Log.info(ctx + "[" + ctx.getClass() + "]");
Log.infof("bean: %s, id: %s", reqBean, reqBean.getId());
logger.infof("Hello, %s", reqBean.getName());
assert Objects.equals(id, reqBean.getId());
}
}
| FlowersContextualReceivers |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/CompactionAppendMsgCallback.java | {
"start": 874,
"end": 1022
} | interface ____ {
AppendMessageResult doAppend(ByteBuffer bbDest, long fileFromOffset, int maxBlank, ByteBuffer bbSrc);
}
| CompactionAppendMsgCallback |
java | google__guava | guava/src/com/google/common/collect/CollectCollectors.java | {
"start": 1535,
"end": 3719
} | class ____ {
private static final Collector<Object, ?, ImmutableList<Object>> TO_IMMUTABLE_LIST =
Collector.of(
ImmutableList::builder,
ImmutableList.Builder::add,
ImmutableList.Builder::combine,
ImmutableList.Builder::build);
private static final Collector<Object, ?, ImmutableSet<Object>> TO_IMMUTABLE_SET =
Collector.of(
ImmutableSet::builder,
ImmutableSet.Builder::add,
ImmutableSet.Builder::combine,
ImmutableSet.Builder::build);
@GwtIncompatible
private static final Collector<Range<Comparable<?>>, ?, ImmutableRangeSet<Comparable<?>>>
TO_IMMUTABLE_RANGE_SET =
Collector.of(
ImmutableRangeSet::builder,
ImmutableRangeSet.Builder::add,
ImmutableRangeSet.Builder::combine,
ImmutableRangeSet.Builder::build);
// Lists
@SuppressWarnings({"rawtypes", "unchecked"})
static <E> Collector<E, ?, ImmutableList<E>> toImmutableList() {
return (Collector) TO_IMMUTABLE_LIST;
}
// Sets
@SuppressWarnings({"rawtypes", "unchecked"})
static <E> Collector<E, ?, ImmutableSet<E>> toImmutableSet() {
return (Collector) TO_IMMUTABLE_SET;
}
static <E> Collector<E, ?, ImmutableSortedSet<E>> toImmutableSortedSet(
Comparator<? super E> comparator) {
checkNotNull(comparator);
return Collector.of(
() -> new ImmutableSortedSet.Builder<E>(comparator),
ImmutableSortedSet.Builder::add,
ImmutableSortedSet.Builder::combine,
ImmutableSortedSet.Builder::build);
}
@SuppressWarnings({"rawtypes", "unchecked"})
static <E extends Enum<E>> Collector<E, ?, ImmutableSet<E>> toImmutableEnumSet() {
return (Collector) EnumSetAccumulator.TO_IMMUTABLE_ENUM_SET;
}
private static <E extends Enum<E>>
Collector<E, EnumSetAccumulator<E>, ImmutableSet<E>> toImmutableEnumSetGeneric() {
return Collector.of(
EnumSetAccumulator::new,
EnumSetAccumulator::add,
EnumSetAccumulator::combine,
EnumSetAccumulator::toImmutableSet,
Collector.Characteristics.UNORDERED);
}
private static final | CollectCollectors |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/runtime/operators/coordination/OperatorEventSendingCheckpointITCase.java | {
"start": 13795,
"end": 14588
} | interface ____ {
CompletableFuture<Acknowledge> handleEvent(
ExecutionAttemptID task,
OperatorID operator,
SerializedValue<OperatorEvent> evt,
TriFunction<
ExecutionAttemptID,
OperatorID,
SerializedValue<OperatorEvent>,
CompletableFuture<Acknowledge>>
rpcHandler);
}
}
// ------------------------------------------------------------------------
// Utils for MiniCluster RPC intercepting
// ------------------------------------------------------------------------
private static final | FilteredRpcAction |
java | quarkusio__quarkus | integration-tests/spring-data-jpa/src/test/java/io/quarkus/it/spring/data/jpa/SongResourceIT.java | {
"start": 125,
"end": 176
} | class ____ extends SongResourceTest {
}
| SongResourceIT |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/TransactedDownloader.java | {
"start": 2674,
"end": 2981
} | class ____ implements TransactedPolicy {
@Override
public void beforeWrap(final Route route, final NamedNode definition) {
}
@Override
public Processor wrap(final Route route, final Processor processor) {
return null;
}
}
}
| DummyTransactedPolicy |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/spring/tx/error/JMXTXUseOriginalBodyWithTXErrorHandlerIT.java | {
"start": 1543,
"end": 3172
} | class ____ extends JMXTXUseOriginalBodyIT {
@EndpointInject("mock:end")
protected MockEndpoint endpoint;
@EndpointInject("mock:error")
protected MockEndpoint error;
@EndpointInject("mock:checkpoint1")
protected MockEndpoint checkpoint1;
@EndpointInject("mock:checkpoint2")
protected MockEndpoint checkpoint2;
@Produce("activemq:JMXTXUseOriginalBodyWithTXErrorHandlerIT.start")
protected ProducerTemplate start;
@Produce("activemq:JMXTXUseOriginalBodyWithTXErrorHandlerIT.broken")
protected ProducerTemplate broken;
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"/org/apache/camel/component/jms/integration/spring/tx/error/JMXTXUseOriginalBodyWithTXErrorHandlerIT.xml");
}
@Override
@Test
public void testWithConstant() throws InterruptedException {
endpoint.expectedMessageCount(0);
error.expectedBodiesReceived("foo");
checkpoint1.expectedBodiesReceived("foo");
checkpoint2.expectedBodiesReceived("oh no");
start.sendBody("foo");
MockEndpoint.assertIsSatisfied(context);
}
@Override
@Test
public void testWithBean() throws InterruptedException {
endpoint.expectedMessageCount(0);
error.expectedBodiesReceived("foo");
checkpoint1.expectedBodiesReceived("foo");
checkpoint2.expectedBodiesReceived("oh no");
broken.sendBody("foo");
MockEndpoint.assertIsSatisfied(context);
}
public static | JMXTXUseOriginalBodyWithTXErrorHandlerIT |
java | quarkusio__quarkus | extensions/security-webauthn/runtime/src/main/java/io/quarkus/security/webauthn/WebAuthnUserProvider.java | {
"start": 169,
"end": 374
} | interface ____ order to tell Quarkus WebAuthn how to look up
* WebAuthn credentials, store new credentials, or update the credentials' counter,
* as well as what roles those credentials map to.
*/
public | in |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/AggregationErrors.java | {
"start": 2876,
"end": 6503
} | class ____)
*
* @param aggregationName - The name of the aggregation
* @param position - optional, for multisource aggregations. Indicates the position of the field causing the problem.
* @return - an appropriate exception
*/
public static RuntimeException reduceTypeMismatch(String aggregationName, Optional<Integer> position) {
String fieldString;
if (position.isPresent()) {
fieldString = "the field in position" + position.get().toString();
} else {
fieldString = "the field you gave";
}
return new IllegalArgumentException(
"Merging/Reducing the aggregations failed when computing the aggregation ["
+ aggregationName
+ "] because "
+ fieldString
+ " in the aggregation query existed as two different "
+ "types in two different indices"
);
}
public static RuntimeException valuesSourceDoesNotSupportScritps(String typeName) {
return new IllegalArgumentException("value source of type [" + typeName + "] is not supported by scripts");
}
public static RuntimeException unsupportedScriptValue(String actual) {
return new IllegalArgumentException("Unsupported script value [" + actual + "], expected a number, date, or boolean");
}
/**
* Indicates that a multivalued field was found where we only support a single valued field
* @return an appropriate exception
*/
public static RuntimeException unsupportedMultivalue() {
return new IllegalArgumentException(
"Encountered more than one value for a "
+ "single document. Use a script to combine multiple values per doc into a single value."
);
}
/**
* Indicates the given values source is not suitable for use in a multivalued aggregation. This is not retryable.
* @param source a string describing the Values Source
* @return an appropriate exception
*/
public static RuntimeException unsupportedMultivalueValuesSource(String source) {
throw new IllegalArgumentException("ValuesSource type " + source + "is not supported for multi-valued aggregation");
}
/**
* Indicates an attempt to use date rounding on a non-date values source
* @param typeName - name of the type we're attempting to round
* @return an appropriate exception
*/
public static RuntimeException unsupportedRounding(String typeName) {
return new IllegalArgumentException("can't round a [" + typeName + "]");
}
/**
* Indicates that an aggregation path (e.g. from a pipeline agg) references an aggregation of the wrong type, for example
* attempting to take a cumulative cardinality of something other than a cardinality aggregation.
*
* @param aggPath the path element found to be invalid
* @param expected
* @param got What we actually got; this may be null.
* @param currentAgg The name of the aggregation in question
* @return an appropriate exception
*/
public static RuntimeException incompatibleAggregationType(String aggPath, String expected, String got, String currentAgg) {
return new AggregationExecutionException.InvalidPath(
aggPath
+ " must reference a "
+ expected
+ " aggregation, got: ["
+ (got == null ? "null" : got)
+ "] at aggregation ["
+ currentAgg
+ "]"
);
}
/**
* This is a 500 | error |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/simple/SimpleProperties.java | {
"start": 1212,
"end": 1905
} | class ____ {
/**
* Whether exporting of metrics to this backend is enabled.
*/
private boolean enabled = true;
/**
* Step size (i.e. reporting frequency) to use.
*/
private Duration step = Duration.ofMinutes(1);
/**
* Counting mode.
*/
private CountingMode mode = CountingMode.CUMULATIVE;
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public Duration getStep() {
return this.step;
}
public void setStep(Duration step) {
this.step = step;
}
public CountingMode getMode() {
return this.mode;
}
public void setMode(CountingMode mode) {
this.mode = mode;
}
}
| SimpleProperties |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KeycloakEndpointBuilderFactory.java | {
"start": 86576,
"end": 86883
} | interface ____
extends
AdvancedKeycloakEndpointConsumerBuilder,
AdvancedKeycloakEndpointProducerBuilder {
default KeycloakEndpointBuilder basic() {
return (KeycloakEndpointBuilder) this;
}
}
public | AdvancedKeycloakEndpointBuilder |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/errors/TransactionExceptionHierarchyTest.java | {
"start": 4292,
"end": 5184
} | class ____ check
*/
@ParameterizedTest
@ValueSource(classes = {
AuthenticationException.class,
AuthorizationException.class,
ClusterAuthorizationException.class,
TransactionalIdAuthorizationException.class,
UnsupportedVersionException.class,
UnsupportedForMessageFormatException.class,
InvalidRecordException.class,
InvalidRequiredAcksException.class,
RecordBatchTooLargeException.class,
InvalidTopicException.class,
TopicAuthorizationException.class,
GroupAuthorizationException.class
})
void testInvalidConfigurationExceptionHierarchy(Class<? extends Exception> exceptionClass) {
assertTrue(InvalidConfigurationException.class.isAssignableFrom(exceptionClass),
exceptionClass.getSimpleName() + " should extend InvalidConfigurationException");
}
}
| to |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/outlierdetection/ScoreByThresholdResult.java | {
"start": 812,
"end": 2279
} | class ____ implements EvaluationMetricResult {
public static final ParseField NAME = new ParseField("score_by_threshold_result");
private final String name;
private final double[] thresholds;
private final double[] scores;
public ScoreByThresholdResult(String name, double[] thresholds, double[] scores) {
assert thresholds.length == scores.length;
this.name = Objects.requireNonNull(name);
this.thresholds = thresholds;
this.scores = scores;
}
public ScoreByThresholdResult(StreamInput in) throws IOException {
this.name = in.readString();
this.thresholds = in.readDoubleArray();
this.scores = in.readDoubleArray();
}
@Override
public String getWriteableName() {
return registeredMetricName(OutlierDetection.NAME, NAME);
}
@Override
public String getMetricName() {
return name;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeDoubleArray(thresholds);
out.writeDoubleArray(scores);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
for (int i = 0; i < thresholds.length; i++) {
builder.field(String.valueOf(thresholds[i]), scores[i]);
}
builder.endObject();
return builder;
}
}
| ScoreByThresholdResult |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InjectionPointInfo.java | {
"start": 17803,
"end": 17884
} | enum ____ {
CDI,
RESOURCE
}
public static | InjectionPointKind |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java | {
"start": 5772,
"end": 6053
} | class ____ extends AutoClass {}
""")
.doTest();
}
@Test
public void extendsAutoValue_badNoImport() {
helper
.addSourceLines(
"TestClass.java",
"""
@com.google.auto.value.AutoValue
abstract | TestClass |
java | netty__netty | example/src/main/java/io/netty/example/http2/tiles/HttpServer.java | {
"start": 1441,
"end": 2572
} | class ____ {
public static final int PORT = Integer.parseInt(System.getProperty("http-port", "8080"));
private static final int MAX_CONTENT_LENGTH = 1024 * 100;
private final EventLoopGroup group;
public HttpServer(EventLoopGroup eventLoopGroup) {
group = eventLoopGroup;
}
public ChannelFuture start() throws Exception {
ServerBootstrap b = new ServerBootstrap();
b.option(ChannelOption.SO_BACKLOG, 1024);
b.group(group).channel(NioServerSocketChannel.class).handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
protected void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast(new HttpRequestDecoder(),
new HttpResponseEncoder(),
new HttpObjectAggregator(MAX_CONTENT_LENGTH),
new Http1RequestHandler());
}
});
Channel ch = b.bind(PORT).sync().channel();
return ch.closeFuture();
}
}
| HttpServer |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/mediatype/CharsetTest.java | {
"start": 2176,
"end": 3277
} | class ____ {
@Path("text")
@Produces("text/plain")
@GET
public String textPlain() {
return "text";
}
@Path("response/text")
@Produces("text/plain")
@GET
public Response responseTextPlain() {
return Response.ok("text").build();
}
@Path("json")
@Produces("application/json")
@GET
public String json() {
return "{\"foo\": \"bar\"}";
}
@Path("response/json")
@Produces("application/json")
@GET
public Response responseJson() {
return Response.ok("{\"foo\": \"bar\"}").build();
}
@Path("image")
@Produces("image/png")
@GET
public Response imagePng() {
return Response.ok("fake image".getBytes(StandardCharsets.UTF_8)).build();
}
@Path("response/image")
@Produces("image/png")
@GET
public byte[] responseImagePng() {
return "fake image".getBytes(StandardCharsets.UTF_8);
}
}
}
| TestResource |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/DefaultExtJSONParserTest.java | {
"start": 16706,
"end": 16949
} | class ____ {
private Reader reader;
public Reader getReader() {
return reader;
}
public void setReader(Reader reader) {
this.reader = reader;
}
}
public static | ErrorObject |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/arc/IntercepredNormalScopedFoo.java | {
"start": 316,
"end": 489
} | class ____ {
private int val;
public int ping() {
return val;
}
@PostConstruct
void init() {
val = 42;
}
}
| IntercepredNormalScopedFoo |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java | {
"start": 42094,
"end": 48405
} | class ____ extends Plugin implements SearchPlugin {
@Override
public List<ScoreFunctionSpec<?>> getScoreFunctions() {
return singletonList(
new ScoreFunctionSpec<>(
RandomScoreFunctionBuilderWithFixedSeed.NAME,
RandomScoreFunctionBuilderWithFixedSeed::new,
RandomScoreFunctionBuilderWithFixedSeed::fromXContent
)
);
}
}
/**
* Check that this query is generally cacheable except for builders using {@link ScriptScoreFunctionBuilder} or
* {@link RandomScoreFunctionBuilder} without a seed
*/
@Override
public void testCacheability() throws IOException {
Directory directory = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), directory);
iw.addDocument(new Document());
final IndexSearcher searcher = newSearcher(iw.getReader());
iw.close();
assertThat(searcher.getIndexReader().leaves().size(), greaterThan(0));
FunctionScoreQueryBuilder queryBuilder = createTestQueryBuilder();
boolean requestCache = isCacheable(queryBuilder);
SearchExecutionContext context = createSearchExecutionContext(searcher);
QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new SearchExecutionContext(context));
assertNotNull(rewriteQuery.toQuery(context));
// we occasionally need to update the expected request cache flag after rewrite to MatchNoneQueryBuilder
if (rewriteQuery instanceof MatchNoneQueryBuilder) {
requestCache = true;
}
assertEquals(
"query should " + (requestCache ? "" : "not") + " be eligible for the request cache: " + queryBuilder.toString(),
requestCache,
context.isCacheable()
);
// test query cache
if (rewriteQuery instanceof MatchNoneQueryBuilder == false) {
Query luceneQuery = rewriteQuery.toQuery(context);
Weight queryWeight = context.searcher().createWeight(searcher.rewrite(luceneQuery), ScoreMode.COMPLETE, 1.0f);
for (LeafReaderContext ctx : context.getIndexReader().leaves()) {
assertFalse(queryWeight.isCacheable(ctx));
}
}
ScoreFunctionBuilder<?> scriptScoreFunction = new ScriptScoreFunctionBuilder(
new Script(ScriptType.INLINE, MockScriptEngine.NAME, "1", Collections.emptyMap())
);
queryBuilder = new FunctionScoreQueryBuilder(
new FilterFunctionBuilder[] { new FilterFunctionBuilder(RandomQueryBuilder.createQuery(random()), scriptScoreFunction) }
);
context = createSearchExecutionContext(searcher);
rewriteQuery = rewriteQuery(queryBuilder, new SearchExecutionContext(context));
assertNotNull(rewriteQuery.toQuery(context));
assertTrue("function script query should be eligible for the request cache: " + queryBuilder.toString(), context.isCacheable());
// test query cache
if (rewriteQuery instanceof MatchNoneQueryBuilder == false) {
Query luceneQuery = rewriteQuery.toQuery(context);
Weight queryWeight = context.searcher().createWeight(searcher.rewrite(luceneQuery), ScoreMode.COMPLETE, 1.0f);
for (LeafReaderContext ctx : context.getIndexReader().leaves()) {
assertFalse(queryWeight.isCacheable(ctx));
}
}
RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilderWithFixedSeed();
queryBuilder = new FunctionScoreQueryBuilder(
new FilterFunctionBuilder[] { new FilterFunctionBuilder(RandomQueryBuilder.createQuery(random()), randomScoreFunctionBuilder) }
);
context = createSearchExecutionContext(searcher);
rewriteQuery = rewriteQuery(queryBuilder, new SearchExecutionContext(context));
assertNotNull(rewriteQuery.toQuery(context));
assertFalse(
"function random query should not be eligible for the request cache: " + queryBuilder.toString(),
context.isCacheable()
);
// test query cache
if (rewriteQuery instanceof MatchNoneQueryBuilder == false) {
Query luceneQuery = rewriteQuery.toQuery(context);
Weight queryWeight = context.searcher().createWeight(searcher.rewrite(luceneQuery), ScoreMode.COMPLETE, 1.0f);
for (LeafReaderContext ctx : context.getIndexReader().leaves()) {
assertFalse(queryWeight.isCacheable(ctx));
}
}
searcher.getIndexReader().close();
directory.close();
}
private boolean isCacheable(FunctionScoreQueryBuilder queryBuilder) {
FilterFunctionBuilder[] filterFunctionBuilders = queryBuilder.filterFunctionBuilders();
for (FilterFunctionBuilder builder : filterFunctionBuilders) {
if (builder.getScoreFunction() instanceof RandomScoreFunctionBuilder
&& ((RandomScoreFunctionBuilder) builder.getScoreFunction()).getSeed() == null) {
return false;
}
}
return true;
}
@Override
public void testMustRewrite() throws IOException {
SearchExecutionContext context = createSearchExecutionContext();
context.setAllowUnmappedFields(true);
TermQueryBuilder termQueryBuilder = new TermQueryBuilder("unmapped_field", "foo");
// main query needs rewriting
FunctionScoreQueryBuilder functionQueryBuilder1 = new FunctionScoreQueryBuilder(termQueryBuilder);
functionQueryBuilder1.setMinScore(1);
IllegalStateException e = expectThrows(IllegalStateException.class, () -> functionQueryBuilder1.toQuery(context));
assertEquals("Rewrite first", e.getMessage());
// filter needs rewriting
FunctionScoreQueryBuilder functionQueryBuilder2 = new FunctionScoreQueryBuilder(
new MatchAllQueryBuilder(),
new FilterFunctionBuilder[] { new FilterFunctionBuilder(termQueryBuilder, new RandomScoreFunctionBuilder()) }
);
e = expectThrows(IllegalStateException.class, () -> functionQueryBuilder2.toQuery(context));
assertEquals("Rewrite first", e.getMessage());
}
}
| TestPlugin |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractDeleteTest.java | {
"start": 1029,
"end": 4287
} | class ____ extends
AbstractFSContractTestBase {
@Test
public void testDeleteEmptyDirNonRecursive() throws Throwable {
Path path = path("testDeleteEmptyDirNonRecursive");
mkdirs(path);
assertDeleted(path, false);
}
@Test
public void testDeleteEmptyDirRecursive() throws Throwable {
Path path = path("testDeleteEmptyDirRecursive");
mkdirs(path);
assertDeleted(path, true);
}
@Test
public void testDeleteNonexistentPathRecursive() throws Throwable {
Path path = path("testDeleteNonexistentPathRecursive");
assertPathDoesNotExist("leftover", path);
ContractTestUtils.rejectRootOperation(path);
assertFalse(getFileSystem().delete(path, true),
"Returned true attempting to recursively delete"
+ " a nonexistent path " + path);
}
@Test
public void testDeleteNonexistentPathNonRecursive() throws Throwable {
Path path = path("testDeleteNonexistentPathNonRecursive");
assertPathDoesNotExist("leftover", path);
ContractTestUtils.rejectRootOperation(path);
assertFalse(getFileSystem().delete(path, false),
"Returned true attempting to non recursively delete"
+ " a nonexistent path " + path);
}
@Test
public void testDeleteNonEmptyDirNonRecursive() throws Throwable {
Path path = path("testDeleteNonEmptyDirNonRecursive");
mkdirs(path);
Path file = new Path(path, "childfile");
ContractTestUtils.writeTextFile(getFileSystem(), file, "goodbye, world",
true);
try {
ContractTestUtils.rejectRootOperation(path);
boolean deleted = getFileSystem().delete(path, false);
fail("non recursive delete should have raised an exception," +
" but completed with exit code " + deleted);
} catch (IOException expected) {
//expected
handleExpectedException(expected);
}
assertIsDirectory(path);
}
@Test
public void testDeleteNonEmptyDirRecursive() throws Throwable {
Path path = path("testDeleteNonEmptyDirRecursive");
mkdirs(path);
Path file = new Path(path, "childfile");
ContractTestUtils.writeTextFile(getFileSystem(), file, "goodbye, world",
true);
assertDeleted(path, true);
assertPathDoesNotExist("not deleted", file);
}
@Test
public void testDeleteDeepEmptyDir() throws Throwable {
mkdirs(path("testDeleteDeepEmptyDir/d1/d2/d3/d4"));
assertDeleted(path("testDeleteDeepEmptyDir/d1/d2/d3"), true);
assertPathDoesNotExist(
"not deleted", path("testDeleteDeepEmptyDir/d1/d2/d3/d4"));
assertPathDoesNotExist(
"not deleted", path("testDeleteDeepEmptyDir/d1/d2/d3"));
assertPathExists( "parent dir is deleted",
path("testDeleteDeepEmptyDir/d1/d2"));
}
@Test
public void testDeleteSingleFile() throws Throwable {
// Test delete of just a file
Path path = path("testDeleteSingleFile/d1/d2");
mkdirs(path);
Path file = new Path(path, "childfile");
ContractTestUtils.writeTextFile(getFileSystem(), file,
"single file to be deleted.", true);
assertPathExists("single file not created", file);
assertDeleted(file, false);
}
}
| AbstractContractDeleteTest |
java | netty__netty | common/src/test/java/io/netty/util/internal/ThreadLocalRandomTest.java | {
"start": 772,
"end": 1468
} | class ____ {
@Test
public void getInitialSeedUniquifierPreservesInterrupt() {
try {
Thread.currentThread().interrupt();
assertTrue(Thread.currentThread().isInterrupted(),
"Assert that thread is interrupted before invocation of getInitialSeedUniquifier()");
ThreadLocalRandom.getInitialSeedUniquifier();
assertTrue(Thread.currentThread().isInterrupted(),
"Assert that thread is interrupted after invocation of getInitialSeedUniquifier()");
} finally {
Thread.interrupted(); // clear interrupted status in order to not affect other tests
}
}
}
| ThreadLocalRandomTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/Char2DArrayAssertBaseTest.java | {
"start": 840,
"end": 1375
} | class ____ extends BaseTestTemplate<Char2DArrayAssert, char[][]> {
protected Char2DArrays arrays;
@Override
protected Char2DArrayAssert create_assertions() {
return new Char2DArrayAssert(new char[][] {});
}
@Override
protected void inject_internal_objects() {
super.inject_internal_objects();
arrays = mock(Char2DArrays.class);
assertions.char2dArrays = arrays;
}
protected Char2DArrays getArrays(Char2DArrayAssert someAssertions) {
return someAssertions.char2dArrays;
}
}
| Char2DArrayAssertBaseTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java | {
"start": 7081,
"end": 52976
} | class ____ extends ESTestCase {
private Client client;
private ThreadPool threadPool;
private AnalysisRegistry analysisRegistry;
private JobManager jobManager;
private JobResultsProvider jobResultsProvider;
private JobResultsPersister jobResultsPersister;
private JobDataCountsPersister jobDataCountsPersister;
private AnnotationPersister annotationPersister;
private AutodetectCommunicator autodetectCommunicator;
private AutodetectProcessFactory autodetectFactory;
private NormalizerFactory normalizerFactory;
private AnomalyDetectionAuditor auditor;
private ClusterState clusterState;
private ClusterService clusterService;
private NativeStorageProvider nativeStorageProvider;
private DataCounts dataCounts = new DataCounts("foo");
private ModelSizeStats modelSizeStats = new ModelSizeStats.Builder("foo").build();
private ModelSnapshot modelSnapshot = new ModelSnapshot.Builder("foo").build();
private Quantiles quantiles = new Quantiles("foo", new Date(), "state");
@Before
@SuppressWarnings("unchecked")
public void setup() throws Exception {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
client = mock(Client.class);
threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
when(threadPool.executor(anyString())).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
when(client.threadPool()).thenReturn(threadPool);
doAnswer(invocationOnMock -> {
if (invocationOnMock.getArguments()[0] instanceof ActionType<?> v) {
ActionListener<?> l = (ActionListener<?>) invocationOnMock.getArguments()[2];
if (v == TransportClusterHealthAction.TYPE) {
ActionListener<ClusterHealthResponse> listener = (ActionListener<ClusterHealthResponse>) l;
listener.onResponse(
new ClusterHealthResponse(
"test",
new String[0],
ClusterState.EMPTY_STATE,
Metadata.DEFAULT_PROJECT_ID,
0,
0,
0,
TimeValue.ZERO
)
);
return null;
}
ParameterizedType parameterizedType = (ParameterizedType) v.getClass().getGenericSuperclass();
Type t = parameterizedType.getActualTypeArguments()[0];
if (t.getTypeName().contains("AcknowledgedResponse")) {
ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) l;
listener.onResponse(AcknowledgedResponse.TRUE);
return null;
}
fail("Mock not configured to handle generic type " + t.getTypeName());
}
return null;
}).when(client).execute(any(), any(), any());
analysisRegistry = CategorizationAnalyzerTests.buildTestAnalysisRegistry(TestEnvironment.newEnvironment(settings));
jobManager = mock(JobManager.class);
jobResultsProvider = mock(JobResultsProvider.class);
jobResultsPersister = mock(JobResultsPersister.class);
JobResultsPersister.Builder bulkPersisterBuilder = mock(JobResultsPersister.Builder.class);
when(jobResultsPersister.bulkPersisterBuilder(any(), any())).thenReturn(bulkPersisterBuilder);
jobDataCountsPersister = mock(JobDataCountsPersister.class);
annotationPersister = mock(AnnotationPersister.class);
AnnotationPersister.Builder bulkAnnotationsBuilder = mock(AnnotationPersister.Builder.class);
when(annotationPersister.bulkPersisterBuilder(any(), any())).thenReturn(bulkAnnotationsBuilder);
autodetectCommunicator = mock(AutodetectCommunicator.class);
autodetectFactory = mock(AutodetectProcessFactory.class);
normalizerFactory = mock(NormalizerFactory.class);
auditor = mock(AnomalyDetectionAuditor.class);
clusterService = mock(ClusterService.class);
ClusterSettings clusterSettings = new ClusterSettings(
Settings.EMPTY,
new HashSet<>(Arrays.asList(MachineLearning.MAX_OPEN_JOBS_PER_NODE, ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES))
);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
Metadata metadata = Metadata.builder()
.indices(
Map.of(
AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + "-000001",
IndexMetadata.builder(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + "-000001")
.settings(
Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.put(SETTING_INDEX_HIDDEN, true)
.put(SETTING_VERSION_CREATED, IndexVersion.current())
.build()
)
.putAlias(AliasMetadata.builder(AnomalyDetectorsIndex.jobStateIndexWriteAlias()).isHidden(true).build())
.build(),
AnnotationIndex.LATEST_INDEX_NAME,
IndexMetadata.builder(AnnotationIndex.LATEST_INDEX_NAME)
.settings(
Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.put(SETTING_INDEX_HIDDEN, true)
.put(SETTING_VERSION_CREATED, IndexVersion.current())
.build()
)
.putAlias(AliasMetadata.builder(AnnotationIndex.READ_ALIAS_NAME).isHidden(true).build())
.putAlias(AliasMetadata.builder(AnnotationIndex.WRITE_ALIAS_NAME).isHidden(true).build())
.build()
)
)
.build();
clusterState = mock(ClusterState.class);
when(clusterState.getMetadata()).thenReturn(metadata);
when(clusterState.metadata()).thenReturn(metadata);
nativeStorageProvider = mock(NativeStorageProvider.class);
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
ActionListener<Job> listener = (ActionListener<Job>) invocationOnMock.getArguments()[1];
listener.onResponse(createJobDetails("foo"));
return null;
}).when(jobManager).getJob(eq("foo"), any());
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
Consumer<AutodetectParams> handler = (Consumer<AutodetectParams>) invocationOnMock.getArguments()[1];
handler.accept(buildAutodetectParams());
return null;
}).when(jobResultsProvider).getAutodetectParams(any(), any(), any());
// when running retry logic use the real executor service
when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE);
}
// Opening a job should register it as an open job with an active autodetect process,
// and persist a task state of OPENED carrying the task's allocation id and no reason.
public void testOpenJob() {
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        ActionListener<Job> listener = (ActionListener<Job>) invocationOnMock.getArguments()[1];
        listener.onResponse(createJobDetails("foo"));
        return null;
    }).when(jobManager).getJob(eq("foo"), any());
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    when(jobTask.getAllocationId()).thenReturn(1L);

    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    assertEquals(1, manager.numberOfOpenJobs());
    assertTrue(manager.jobHasActiveAutodetectProcess(jobTask));
    // Capture the persistent task state written on open and check its contents.
    ArgumentCaptor<JobTaskState> captor = ArgumentCaptor.forClass(JobTaskState.class);
    verify(jobTask).updatePersistentTaskState(captor.capture(), any());
    JobTaskState state = captor.getValue();
    assertThat(state.getState(), equalTo(JobState.OPENED));
    assertThat(state.getAllocationId(), equalTo(1L));
    assertNull(state.getReason());
}
// A job with no version (created before 5.5) must be rejected on open with a clear error.
public void testOpenJob_withoutVersion() {
    Job.Builder jobBuilder = new Job.Builder(createJobDetails("no_version"));
    jobBuilder.setJobVersion(null);
    Job job = jobBuilder.build();
    assertThat(job.getJobVersion(), is(nullValue()));
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        ActionListener<Job> listener = (ActionListener<Job>) invocationOnMock.getArguments()[1];
        listener.onResponse(job);
        return null;
    }).when(jobManager).getJob(eq(job.getId()), any());

    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn(job.getId());
    // The close handler receives the rejection as an exception rather than throwing.
    AtomicReference<Exception> errorHolder = new AtomicReference<>();
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> errorHolder.set(e));
    Exception error = errorHolder.get();
    assertThat(error, is(notNullValue()));
    assertThat(error.getMessage(), equalTo("Cannot open job [no_version] because jobs created prior to version 5.5 are not supported"));
}
// With MAX_OPEN_JOBS_PER_NODE set to 3, a fourth open must fail with a capacity error,
// and closing one job must free a slot so a subsequent open succeeds again.
@SuppressWarnings("unchecked")
public void testOpenJob_exceedMaxNumJobs() {
    // Stub job lookups for all four job ids used below.
    for (String jobId : new String[] { "foo", "bar", "baz", "foobar" }) {
        doAnswer(invocationOnMock -> {
            @SuppressWarnings("unchecked")
            ActionListener<Job> listener = (ActionListener<Job>) invocationOnMock.getArguments()[1];
            listener.onResponse(createJobDetails(jobId));
            return null;
        }).when(jobManager).getJob(eq(jobId), any());
    }

    ThreadPool.Cancellable cancellable = mock(ThreadPool.Cancellable.class);
    when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(cancellable);
    AutodetectProcess autodetectProcess = mock(AutodetectProcess.class);
    when(autodetectProcess.isProcessAlive()).thenReturn(true);
    when(autodetectProcess.readAutodetectResults()).thenReturn(Collections.emptyIterator());
    autodetectFactory = (pid, j, autodetectParams, e, onProcessCrash) -> autodetectProcess;
    Settings.Builder settings = Settings.builder();
    settings.put(MachineLearning.MAX_OPEN_JOBS_PER_NODE.getKey(), 3);
    AutodetectProcessManager manager = createSpyManager(settings.build());
    // Use the real create(...) so the per-node job limit is actually enforced.
    doCallRealMethod().when(manager).create(any(), any(), any(), any());
    ExecutorService executorService = mock(ExecutorService.class);
    Future<?> future = mock(Future.class);
    when(executorService.submit(any(Callable.class))).thenReturn(future);
    doReturn(executorService).when(manager).createAutodetectExecutorService(any());
    doAnswer(invocationOnMock -> {
        CheckedConsumer<Exception, IOException> consumer = (CheckedConsumer<Exception, IOException>) invocationOnMock.getArguments()[3];
        consumer.accept(null);
        return null;
    }).when(manager).setJobState(any(), eq(JobState.FAILED), any(), any());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("bar");
    when(jobTask.getAllocationId()).thenReturn(1L);
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("baz");
    when(jobTask.getAllocationId()).thenReturn(2L);
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    assertEquals(3, manager.numberOfOpenJobs());

    // Fourth job exceeds the configured capacity of 3.
    Exception[] holder = new Exception[1];
    jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foobar");
    when(jobTask.getAllocationId()).thenReturn(3L);
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> holder[0] = e);
    Exception e = holder[0];
    assertEquals("max running job capacity [3] reached", e.getMessage());

    // Closing "baz" frees a slot; reopening it must then succeed.
    jobTask = mock(JobTask.class);
    when(jobTask.getAllocationId()).thenReturn(2L);
    when(jobTask.getJobId()).thenReturn("baz");
    manager.closeJob(jobTask, null);
    assertEquals(2, manager.numberOfOpenJobs());
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e1, b) -> {});
    assertEquals(3, manager.numberOfOpenJobs());
}
// Feeding data to an opened job should leave exactly one job registered as open.
public void testProcessData() {
    AutodetectProcessManager manager = createSpyManager();
    assertEquals(0, manager.numberOfOpenJobs());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    DataLoadParams params = new DataLoadParams(TimeRange.builder().build(), Optional.empty());
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        params,
        (dataCounts1, e) -> {}
    );
    assertEquals(1, manager.numberOfOpenJobs());
}
// When the communicator reports an IOException while writing data, processData
// must surface an exception to the caller's handler instead of swallowing it.
public void testProcessDataThrowsElasticsearchStatusException_onIoException() {
    AutodetectProcessManager manager = createSpyManager();

    DataLoadParams params = mock(DataLoadParams.class);
    InputStream inputStream = createInputStream("");
    XContentType xContentType = randomFrom(XContentType.values());
    // Simulate the write failing with an IOException via the completion handler.
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        BiConsumer<DataCounts, Exception> handler = (BiConsumer<DataCounts, Exception>) invocationOnMock.getArguments()[4];
        handler.accept(null, new IOException("blah"));
        return null;
    }).when(autodetectCommunicator).writeToJob(eq(inputStream), same(analysisRegistry), same(xContentType), eq(params), any());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    Exception[] holder = new Exception[1];
    manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> holder[0] = e);
    assertNotNull(holder[0]);
}
// Closing an open job should remove it from the open set and close the
// communicator in non-vacating mode (setVacating(false)).
public void testCloseJob() {
    AutodetectProcessManager manager = createSpyManager();
    assertEquals(0, manager.numberOfOpenJobs());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    // job is created
    assertEquals(1, manager.numberOfOpenJobs());
    manager.closeJob(jobTask, null);
    assertEquals(0, manager.numberOfOpenJobs());
    verify(autodetectCommunicator).setVacating(false);
}
// Vacating open jobs on this node should close them via the vacate path,
// i.e. the communicator is told setVacating(true) and the job count drops to 0.
public void testVacate() {
    // Run submitted work inline so the vacate completes synchronously in the test.
    ExecutorService executorService = mock(ExecutorService.class);
    doAnswer(invocationOnMock -> {
        ((Runnable) invocationOnMock.getArguments()[0]).run();
        return null;
    }).when(executorService).execute(any(Runnable.class));
    when(threadPool.executor(anyString())).thenReturn(executorService);
    AutodetectProcessManager manager = createSpyManager();
    assertEquals(0, manager.numberOfOpenJobs());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    when(jobTask.triggerVacate()).thenReturn(true);
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    // job is created
    assertEquals(1, manager.numberOfOpenJobs());
    when(jobTask.isVacating()).thenReturn(true);
    manager.vacateOpenJobsOnThisNode();
    assertEquals(0, manager.numberOfOpenJobs());
    verify(autodetectCommunicator).setVacating(true);
}
// Two simultaneous close requests for the same job must result in exactly one
// call to AutodetectCommunicator.close(); the second close waits for the first.
public void testCanCloseClosingJob() throws Exception {
    AtomicInteger numberOfCommunicatorCloses = new AtomicInteger(0);
    doAnswer(invocationOnMock -> {
        numberOfCommunicatorCloses.incrementAndGet();
        // This increases the chance of the two threads both getting into
        // the middle of the AutodetectProcessManager.close() method
        Thread.yield();
        return null;
    }).when(autodetectCommunicator).close();
    AutodetectProcessManager manager = createSpyManager();
    assertEquals(0, manager.numberOfOpenJobs());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    assertEquals(1, manager.numberOfOpenJobs());

    // Close the job in a separate thread
    Thread closeThread = new Thread(() -> manager.closeJob(jobTask, "in separate thread"));
    closeThread.start();
    Thread.yield();

    // Also close the job in the current thread, so that we have two simultaneous close requests
    manager.closeJob(jobTask, "in main test thread");

    // The 10 second timeout here is usually far in excess of what is required.  In the vast
    // majority of cases the other thread will exit within a few milliseconds.  However, it
    // has been observed that on some VMs the test can fail because the VM stalls at the
    // wrong moment.  A 10 second timeout is on a par with the length of time assertBusy()
    // would wait under these circumstances.
    closeThread.join(10000);
    assertFalse(closeThread.isAlive());

    // Only one of the threads should have called AutodetectCommunicator.close()
    assertEquals(1, numberOfCommunicatorCloses.get());
    assertEquals(0, manager.numberOfOpenJobs());
}
// Killing a job while it is mid-close must interrupt the in-progress close:
// the latches choreograph close() blocking until killProcess() has been called.
public void testCanKillClosingJob() throws Exception {
    CountDownLatch closeStartedLatch = new CountDownLatch(1);
    CountDownLatch killLatch = new CountDownLatch(1);
    CountDownLatch closeInterruptedLatch = new CountDownLatch(1);
    // close() signals that it has started, then blocks until the kill arrives.
    doAnswer(invocationOnMock -> {
        closeStartedLatch.countDown();
        if (killLatch.await(3, TimeUnit.SECONDS)) {
            closeInterruptedLatch.countDown();
        }
        return null;
    }).when(autodetectCommunicator).close();
    doAnswer(invocationOnMock -> {
        killLatch.countDown();
        return null;
    }).when(autodetectCommunicator).killProcess(anyBoolean(), anyBoolean(), anyBoolean());
    AutodetectProcessManager manager = createSpyManager();
    assertEquals(0, manager.numberOfOpenJobs());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    // Close the job in a separate thread so that it can simulate taking a long time to close
    Thread closeThread = new Thread(() -> manager.closeJob(jobTask, null));
    closeThread.start();
    assertTrue(closeStartedLatch.await(3, TimeUnit.SECONDS));

    // Kill the job in the current thread, which will be while the job is "closing"
    manager.killProcess(jobTask, false, null);
    assertEquals(0, killLatch.getCount());

    // Assert close method was awoken by the kill
    assertTrue(closeInterruptedLatch.await(3, TimeUnit.SECONDS));

    // NOTE(review): the 500ms join is much tighter than the 10s used in
    // testCanCloseClosingJob — presumably safe because the close has already
    // been unblocked above, but could be flaky on a stalled VM; confirm.
    closeThread.join(500);
    assertFalse(closeThread.isAlive());
}
// processData must pass the DataLoadParams (carrying the bucket-reset time range)
// through to the communicator's writeToJob call unchanged.
public void testBucketResetMessageIsSent() {
    AutodetectProcessManager manager = createSpyManager();
    XContentType xContentType = randomFrom(XContentType.values());

    DataLoadParams params = new DataLoadParams(TimeRange.builder().startTime("1000").endTime("2000").build(), Optional.empty());
    InputStream inputStream = createInputStream("");
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> {});
    verify(autodetectCommunicator).writeToJob(same(inputStream), same(analysisRegistry), same(xContentType), same(params), any());
}
// flushJob must delegate to the communicator with the exact FlushJobParams instance.
public void testFlush() {
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    InputStream inputStream = createInputStream("");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        inputStream,
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    FlushJobParams params = FlushJobParams.builder().build();
    manager.flushJob(jobTask, params, ActionTestUtils.assertNoFailureListener(flushAcknowledgement -> {}));

    verify(autodetectCommunicator).flushJob(same(params), any());
}
// When the communicator's flush fails with an IOException, flushJob must report
// a wrapped, job-scoped error to the listener's failure path.
public void testFlushThrows() {
    AutodetectProcessManager manager = createSpyManagerAndCallProcessData("foo");

    FlushJobParams params = FlushJobParams.builder().build();
    // Simulate the flush completing with an IOException.
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        BiConsumer<Void, Exception> handler = (BiConsumer<Void, Exception>) invocationOnMock.getArguments()[1];
        handler.accept(null, new IOException("blah"));
        return null;
    }).when(autodetectCommunicator).flushJob(same(params), any());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    Exception[] holder = new Exception[1];
    manager.flushJob(jobTask, params, ActionListener.wrap(flushAcknowledgement -> {}, e -> holder[0] = e));
    assertEquals("[foo] exception while flushing job", holder[0].getMessage());
}
// If the communicator's close() throws (e.g. the native process crashed), closeJob
// must propagate an ElasticsearchException, still deregister the job, and mark
// the persistent task FAILED.
public void testCloseThrows() {
    AutodetectProcessManager manager = createSpyManager();

    // let the communicator throw, simulating a problem with the underlying
    // autodetect, e.g. a crash
    doThrow(RuntimeException.class).when(autodetectCommunicator).close();

    // create a jobtask
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );
    verify(manager).setJobState(any(), eq(JobState.OPENED), any(), any());
    // job is created
    assertEquals(1, manager.numberOfOpenJobs());
    expectThrows(ElasticsearchException.class, () -> manager.closeJob(jobTask, null));
    assertEquals(0, manager.numberOfOpenJobs());

    verify(manager).setJobState(any(), eq(JobState.FAILED), any());
}
// writeUpdateProcessMessage must forward the model plot config and detector
// updates from UpdateParams into the UpdateProcessMessage sent to the communicator.
public void testWriteUpdateProcessMessage() {
    AutodetectProcessManager manager = createSpyManagerAndCallProcessData("foo");
    ModelPlotConfig modelConfig = mock(ModelPlotConfig.class);
    List<DetectionRule> rules = Collections.singletonList(mock(DetectionRule.class));
    List<JobUpdate.DetectorUpdate> detectorUpdates = Collections.singletonList(new JobUpdate.DetectorUpdate(2, null, rules));
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    UpdateParams updateParams = UpdateParams.builder("foo").modelPlotConfig(modelConfig).detectorUpdates(detectorUpdates).build();
    manager.writeUpdateProcessMessage(jobTask, updateParams, e -> {});

    ArgumentCaptor<UpdateProcessMessage> captor = ArgumentCaptor.forClass(UpdateProcessMessage.class);
    verify(autodetectCommunicator).writeUpdateProcessMessage(captor.capture(), any());

    UpdateProcessMessage updateProcessMessage = captor.getValue();
    assertThat(updateProcessMessage.getModelPlotConfig(), equalTo(modelConfig));
    assertThat(updateProcessMessage.getDetectorUpdates(), equalTo(detectorUpdates));
}
// jobHasActiveAutodetectProcess is false before open, true after data has been
// processed, and false for a different job that was never opened.
public void testJobHasActiveAutodetectProcess() {
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    assertFalse(manager.jobHasActiveAutodetectProcess(jobTask));

    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    assertTrue(manager.jobHasActiveAutodetectProcess(jobTask));
    jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("bar");
    when(jobTask.getAllocationId()).thenReturn(1L);
    assertFalse(manager.jobHasActiveAutodetectProcess(jobTask));
}
// killAllProcessesOnThisNode must kill the communicator's process with
// (awaitCompletion=false, finish=false, silent=true) — per the verify below.
public void testKillKillsAutodetectProcess() throws IOException {
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    assertFalse(manager.jobHasActiveAutodetectProcess(jobTask));

    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    manager.processData(
        jobTask,
        analysisRegistry,
        createInputStream(""),
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> {}
    );

    assertTrue(manager.jobHasActiveAutodetectProcess(jobTask));

    manager.killAllProcessesOnThisNode();

    // NOTE(review): the boolean argument semantics are taken from the call site;
    // confirm parameter names against AutodetectCommunicator.killProcess.
    verify(autodetectCommunicator).killProcess(false, false, true);
}
// Killing a job that has no running process must still complete the persistent
// task (doMarkAsCompleted is invoked) so the task does not linger.
public void testKillingAMissingJobFinishesTheTask() {
    AutodetectProcessManager manager = createSpyManager();
    XPackLicenseState licenseState = mock(XPackLicenseState.class);
    AtomicBoolean markCalled = new AtomicBoolean();
    // Real JobTask subclass so we can observe the completion callback.
    JobTask jobTask = new JobTask("foo", 0, "type", "action", TaskId.EMPTY_TASK_ID, Map.of(), licenseState) {
        @Override
        protected void doMarkAsCompleted() {
            markCalled.set(true);
        }
    };
    jobTask.init(mock(PersistentTasksService.class), mock(TaskManager.class), "taskid", 0);

    manager.killProcess(jobTask, false, null);

    assertThat(markCalled.get(), is(true));
}
// processData must relay the DataCounts produced by the communicator's write
// back to the caller's handler.
public void testProcessData_GivenStateNotOpened() {
    // The communicator reports successful ingestion with a DataCounts result.
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        BiConsumer<DataCounts, Exception> handler = (BiConsumer<DataCounts, Exception>) invocationOnMock.getArguments()[4];
        handler.accept(new DataCounts("foo"), null);
        return null;
    }).when(autodetectCommunicator).writeToJob(any(), any(), any(), any(), any());
    AutodetectProcessManager manager = createSpyManager();

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
    InputStream inputStream = createInputStream("");
    DataCounts[] dataCounts = new DataCounts[1];
    manager.processData(
        jobTask,
        analysisRegistry,
        inputStream,
        randomFrom(XContentType.values()),
        mock(DataLoadParams.class),
        (dataCounts1, e) -> dataCounts[0] = dataCounts1
    );

    assertThat(dataCounts[0], equalTo(new DataCounts("foo")));
}
// If the executor rejects the job's worker threads, create(...) must propagate
// the EsRejectedExecutionException and close the autodetect process it started.
public void testCreate_notEnoughThreads() throws IOException {
    when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
    ExecutorService executorService = mock(ExecutorService.class);
    doThrow(new EsRejectedExecutionException("")).when(executorService).submit(any(Runnable.class));
    when(threadPool.executor(anyString())).thenReturn(executorService);
    when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(mock(ThreadPool.Cancellable.class));
    Job job = createJobDetails("my_id");
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        ActionListener<Job> listener = (ActionListener<Job>) invocationOnMock.getArguments()[1];
        listener.onResponse(job);
        return null;
    }).when(jobManager).getJob(eq("my_id"), any());

    AutodetectProcess autodetectProcess = mock(AutodetectProcess.class);
    autodetectFactory = (pid, j, autodetectParams, e, onProcessCrash) -> autodetectProcess;
    AutodetectProcessManager manager = createSpyManager();
    doCallRealMethod().when(manager).create(any(), any(), any(), any());

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("my_id");
    expectThrows(EsRejectedExecutionException.class, () -> manager.create(jobTask, job, buildAutodetectParams(), (e, b) -> {}));
    // The process started for the job must not be leaked on rejection.
    verify(autodetectProcess, times(1)).close();
}
// First-ever open (no model snapshot): the audit notification reports N/A for
// both the snapshot and the latest record timestamp, with no other audit calls.
public void testCreate_givenFirstTime() {
    modelSnapshot = null;
    AutodetectProcessManager manager = createNonSpyManager("foo");

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.create(jobTask, createJobDetails("foo"), buildAutodetectParams(), (e, b) -> {});

    String expectedNotification = "Loading model snapshot [N/A], job latest_record_timestamp [N/A]";
    verify(auditor).info("foo", expectedNotification);
    verifyNoMoreInteractions(auditor);
}
// Open with an existing snapshot: the audit notification includes the snapshot id
// and both the snapshot's and the job's latest record timestamps.
public void testCreate_givenExistingModelSnapshot() {
    modelSnapshot = new ModelSnapshot.Builder("foo").setSnapshotId("snapshot-1").setLatestRecordTimeStamp(new Date(0L)).build();
    dataCounts = new DataCounts("foo");
    dataCounts.setLatestRecordTimeStamp(new Date(1L));
    AutodetectProcessManager manager = createNonSpyManager("foo");

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.create(jobTask, createJobDetails("foo"), buildAutodetectParams(), (e, b) -> {});

    String expectedNotification = "Loading model snapshot [snapshot-1] with "
        + "latest_record_timestamp [1970-01-01T00:00:00.000Z], "
        + "job latest_record_timestamp [1970-01-01T00:00:00.001Z]";
    verify(auditor).info("foo", expectedNotification);
    verifyNoMoreInteractions(auditor);
}
// A job that has processed records but lacks a snapshot and quantiles should
// trigger audit warnings for both missing artifacts, plus the usual info line.
public void testCreate_givenNonZeroCountsAndNoModelSnapshotNorQuantiles() {
    modelSnapshot = null;
    quantiles = null;
    dataCounts = new DataCounts("foo");
    dataCounts.setLatestRecordTimeStamp(new Date(0L));
    dataCounts.incrementProcessedRecordCount(42L);
    AutodetectProcessManager manager = createNonSpyManager("foo");

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.create(jobTask, createJobDetails("foo"), buildAutodetectParams(), (e, b) -> {});

    String expectedNotification = "Loading model snapshot [N/A], " + "job latest_record_timestamp [1970-01-01T00:00:00.000Z]";
    verify(auditor).info("foo", expectedNotification);
    verify(auditor).warning("foo", "No model snapshot could be found for a job with processed records");
    verify(auditor).warning("foo", "No quantiles could be found for a job with processed records");
    verifyNoMoreInteractions(auditor);
}
// getOpenProcessMemoryUsage must report the process overhead plus the memory
// figure matching the assignment memory basis recorded in the model size stats.
public void testGetOpenProcessMemoryUsage() {
    modelSnapshot = null;
    quantiles = null;
    dataCounts = new DataCounts("foo");
    dataCounts.setLatestRecordTimeStamp(new Date(0L));
    dataCounts.incrementProcessedRecordCount(42L);
    // Randomize sizes with limit > peak > current so each basis is distinguishable.
    long modelMemoryLimitBytes = ByteSizeValue.ofMb(randomIntBetween(10, 1000)).getBytes();
    long peakModelBytes = randomLongBetween(100000, modelMemoryLimitBytes - 1);
    long modelBytes = randomLongBetween(1, peakModelBytes - 1);
    AssignmentMemoryBasis assignmentMemoryBasis = randomFrom(AssignmentMemoryBasis.values());
    modelSizeStats = new ModelSizeStats.Builder("foo").setModelBytesMemoryLimit(modelMemoryLimitBytes)
        .setPeakModelBytes(peakModelBytes)
        .setModelBytes(modelBytes)
        .setAssignmentMemoryBasis(assignmentMemoryBasis)
        .build();
    when(autodetectCommunicator.getModelSizeStats()).thenReturn(modelSizeStats);
    AutodetectProcessManager manager = createSpyManager();

    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getJobId()).thenReturn("foo");
    manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});

    long expectedSizeBytes = Job.PROCESS_MEMORY_OVERHEAD.getBytes() + switch (assignmentMemoryBasis) {
        case MODEL_MEMORY_LIMIT -> modelMemoryLimitBytes;
        case CURRENT_MODEL_BYTES -> modelBytes;
        case PEAK_MODEL_BYTES -> peakModelBytes;
    };
    assertThat(manager.getOpenProcessMemoryUsage(), equalTo(ByteSizeValue.ofBytes(expectedSizeBytes)));
}
// The no-handler setJobState overload must write a persistent task state with the
// requested state, the task's allocation id, and the given reason.
public void testSetJobState_withoutHandler_invokesPersistentTaskUpdate() {
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    when(jobTask.getAllocationId()).thenReturn(123L);
    when(jobTask.getJobId()).thenReturn("job-123");

    // call the no-handler overload
    manager.setJobState(jobTask, JobState.CLOSING, "closing-reason");

    // verify we called updatePersistentTaskState with the expected state
    // (forClass is fully typed, so no @SuppressWarnings("unchecked") is needed here)
    ArgumentCaptor<JobTaskState> stateCaptor = ArgumentCaptor.forClass(JobTaskState.class);
    verify(jobTask).updatePersistentTaskState(stateCaptor.capture(), any());
    JobTaskState captured = stateCaptor.getValue();
    assertEquals(JobState.CLOSING, captured.getState());
    assertEquals(123L, captured.getAllocationId());
    assertEquals("closing-reason", captured.getReason());
}
public void testSetJobState_withHandler_onResponse_triggersHandlerNull() throws IOException {
    // This test verifies the “happy‐path” of the retryable overload—i.e. what happens when the very first call
    // to updatePersistentTaskState succeeds. On a successful state update it must invoke handler.accept(null)
    // (because there was no error).
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);

    // stub updatePersistentTaskState to call onResponse
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> listener = (ActionListener<
            PersistentTasksCustomMetadata.PersistentTask<?>>) invocation.getArguments()[1];
        listener.onResponse(null);
        return null;
    }).when(jobTask).updatePersistentTaskState(any(), any());

    // Record whatever exception (if any) the handler is invoked with.
    AtomicReference<Exception> holder = new AtomicReference<>();
    CheckedConsumer<Exception, IOException> handler = holder::set;

    manager.setJobState(jobTask, JobState.FAILED, "fail-reason", handler);

    // onResponse should have driven handler.accept(null)
    assertNull(holder.get());
    verify(jobTask).updatePersistentTaskState(any(JobTaskState.class), any());
}
public void testSetJobState_withHandler_onFailure_triggersHandlerException() throws IOException {
    // Verifies that when updatePersistentTaskState reports a failure, the handler receives that exception
    // Execute any scheduled retry immediately on this thread for determinism.
    when(threadPool.schedule(any(Runnable.class), any(TimeValue.class), any(Executor.class))).thenAnswer(invocation -> {
        Runnable r = invocation.getArgument(0);
        r.run();
        return mock(ThreadPool.Cancellable.class);
    });

    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    ResourceNotFoundException boom = new ResourceNotFoundException("boom");
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> listener = (ActionListener<
            PersistentTasksCustomMetadata.PersistentTask<?>>) invocation.getArguments()[1];
        listener.onFailure(boom);
        return null;
    }).when(jobTask).updatePersistentTaskState(any(), any());

    AtomicReference<Exception> holder = new AtomicReference<>();
    CheckedConsumer<Exception, IOException> handler = holder::set;

    manager.setJobState(jobTask, JobState.FAILED, "fail-reason", handler);

    // onFailure should have driven handler.accept(boom)
    assertSame(boom, holder.get());
    verify(jobTask).updatePersistentTaskState(any(JobTaskState.class), any());
}
public void testSetJobState_withHandler_retriesUntilSuccess() throws IOException {
    // Verifies that transient failures are retried until eventual success, and the handler receives null on success

    // ensure that all retries are executed on the same thread for determinism
    when(threadPool.schedule(any(Runnable.class), any(TimeValue.class), any(Executor.class))).thenAnswer(invocation -> {
        Runnable r = invocation.getArgument(0);
        r.run();
        return mock(ThreadPool.Cancellable.class);
    });

    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    AtomicInteger attempts = new AtomicInteger();
    doAnswer(invocation -> {
        // Simulate transient failures for the first two attempts, then succeed on the third
        @SuppressWarnings("unchecked")
        ActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> listener = (ActionListener<
            PersistentTasksCustomMetadata.PersistentTask<?>>) invocation.getArguments()[1];
        if (attempts.incrementAndGet() < 3) {
            listener.onFailure(new RuntimeException("transient failure"));
        } else {
            listener.onResponse(null);
        }
        return null;
    }).when(jobTask).updatePersistentTaskState(any(), any());

    AtomicReference<Exception> holder = new AtomicReference<>();
    CheckedConsumer<Exception, IOException> handler = holder::set;

    manager.setJobState(jobTask, JobState.OPENED, "retry-test", handler);

    // confirms that the method was called exactly three times (two failures then one success).
    verify(jobTask, times(3)).updatePersistentTaskState(any(JobTaskState.class), any());
    // Success means the handler sees no exception.
    assertNull(holder.get());
}
public void testSetJobState_withHandler_noRetryOnResourceNotFound() throws IOException {
    // Ensures that if the persistent‐state update fails with a ResourceNotFoundException, the retry loop does not retry
    // again but immediately invokes the user’s handler with that exception.
    AutodetectProcessManager manager = createSpyManager();
    JobTask jobTask = mock(JobTask.class);
    ResourceNotFoundException rnfe = new ResourceNotFoundException("not found");
    doAnswer(invocation -> {
        // Simulate a ResourceNotFoundException that should not be retried
        @SuppressWarnings("unchecked")
        ActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> listener = (ActionListener<
            PersistentTasksCustomMetadata.PersistentTask<?>>) invocation.getArguments()[1];
        listener.onFailure(rnfe);
        return null;
    }).when(jobTask).updatePersistentTaskState(any(), any());

    AtomicReference<Exception> holder = new AtomicReference<>();
    CheckedConsumer<Exception, IOException> handler = holder::set;

    manager.setJobState(jobTask, JobState.OPENED, "rnfe-test", handler);

    // updatePersistentTaskState(...) was invoked exactly once (no retries).
    verify(jobTask, times(1)).updatePersistentTaskState(any(JobTaskState.class), any());
    // The handler should have been invoked with the ResourceNotFoundException
    assertSame(rnfe, holder.get());
}
/**
 * Builds a real (non-spied) manager for the given job id: stubs the thread pool,
 * stubs jobManager.getJob to return {@link #createJobDetails}, and installs a
 * factory that returns a mocked AutodetectProcess.
 */
private AutodetectProcessManager createNonSpyManager(String jobId) {
    ExecutorService executorService = mock(ExecutorService.class);
    when(threadPool.executor(anyString())).thenReturn(executorService);
    when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(mock(ThreadPool.Cancellable.class));
    doAnswer(invocationOnMock -> {
        @SuppressWarnings("unchecked")
        ActionListener<Job> listener = (ActionListener<Job>) invocationOnMock.getArguments()[1];
        listener.onResponse(createJobDetails(jobId));
        return null;
    }).when(jobManager).getJob(eq(jobId), any());

    AutodetectProcess autodetectProcess = mock(AutodetectProcess.class);
    autodetectFactory = (pid, j, autodetectParams, e, onProcessCrash) -> autodetectProcess;
    return createManager(Settings.EMPTY);
}
/**
 * Assembles AutodetectParams for job "foo" from the test's current field values
 * (dataCounts, modelSizeStats, modelSnapshot, quantiles).
 */
private AutodetectParams buildAutodetectParams() {
    AutodetectParams.Builder paramsBuilder = new AutodetectParams.Builder("foo");
    paramsBuilder.setQuantiles(quantiles);
    paramsBuilder.setModelSnapshot(modelSnapshot);
    paramsBuilder.setModelSizeStats(modelSizeStats);
    paramsBuilder.setDataCounts(dataCounts);
    return paramsBuilder.build();
}
/** Convenience overload: spy manager with default (empty) settings. */
private AutodetectProcessManager createSpyManager() {
    return createSpyManager(Settings.EMPTY);
}
/**
 * Builds a Mockito spy of the manager whose create(...) is stubbed to hand back
 * the shared autodetectCommunicator mock, bypassing real process creation.
 */
private AutodetectProcessManager createSpyManager(Settings settings) {
    AutodetectProcessManager spiedManager = spy(createManager(settings));
    doReturn(autodetectCommunicator).when(spiedManager).create(any(), any(), eq(buildAutodetectParams()), any());
    return spiedManager;
}
/**
 * Constructs a real AutodetectProcessManager wired to the test's shared mocks.
 * Argument order matches the production constructor — do not reorder.
 */
private AutodetectProcessManager createManager(Settings settings) {
    return new AutodetectProcessManager(
        settings,
        client,
        threadPool,
        new NamedXContentRegistry(Collections.emptyList()),
        auditor,
        clusterService,
        jobManager,
        jobResultsProvider,
        jobResultsPersister,
        jobDataCountsPersister,
        annotationPersister,
        autodetectFactory,
        normalizerFactory,
        nativeStorageProvider,
        TestIndexNameExpressionResolver.newInstance()
    );
}
private AutodetectProcessManager createSpyManagerAndCallProcessData(String jobId) {
AutodetectProcessManager manager = createSpyManager();
JobTask jobTask = mock(JobTask.class);
when(jobTask.getJobId()).thenReturn(jobId);
manager.openJob(jobTask, clusterState, TEST_REQUEST_TIMEOUT, (e, b) -> {});
manager.processData(
jobTask,
analysisRegistry,
createInputStream(""),
randomFrom(XContentType.values()),
mock(DataLoadParams.class),
(dataCounts, e) -> {}
);
return manager;
}
private Job createJobDetails(String jobId) {
DataDescription.Builder dd = new DataDescription.Builder();
dd.setTimeFormat("epoch");
Detector d = new Detector.Builder("metric", "value").build();
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(d));
Job.Builder builder = new Job.Builder(jobId);
builder.setDataDescription(dd);
builder.setAnalysisConfig(ac);
return builder.build(new Date());
}
private static InputStream createInputStream(String input) {
return new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8));
}
}
| AutodetectProcessManagerTests |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 125596,
"end": 125885
} | class ____<K extends Comparable<K>, V> {",
" abstract K key();",
" abstract V value();",
" abstract Builder<K, V> toBuilder1();",
" abstract Builder<K, V> toBuilder2();",
"",
" @AutoValue.Builder",
" | Baz |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/lock/model/LockInstance.java | {
"start": 906,
"end": 3095
} | class ____ implements Serializable {
private static final long serialVersionUID = -3460985546826875524L;
private String key;
private Long expiredTime;
private Map<String, ? extends Serializable> params;
private String lockType;
public LockInstance(String key, Long expiredTime, String lockType) {
this.key = key;
this.expiredTime = expiredTime;
this.lockType = lockType;
}
public LockInstance() {
}
public Long getExpiredTime() {
return expiredTime;
}
public void setExpiredTime(Long expiredTime) {
this.expiredTime = expiredTime;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public Map<String, ? extends Serializable> getParams() {
return params;
}
public void setParams(Map<String, ? extends Serializable> params) {
this.params = params;
}
/**
* Will call {@link LockService#remoteTryLock(LockInstance)} request grpc to get lock and do something.<br/> can be
* {@link Override} to do some client special logic.
*
* @param lockService {@link LockService}
* @return Boolean {@link Boolean}
* @throws NacosException NacosException
*/
public Boolean lock(LockService lockService) throws NacosException {
return lockService.remoteTryLock(this);
}
/**
* Will call {@link LockService#remoteReleaseLock(LockInstance)} request grpc to release lock and do something.<br/>
* can be {@link Override} to do some client special logic.
*
* @param lockService {@link LockService}
* @return Boolean {@link Boolean}
* @throws NacosException NacosException
*/
public Boolean unLock(LockService lockService) throws NacosException {
return lockService.remoteReleaseLock(this);
}
/**
* spi get lock type.
*
* @return type
*/
public String getLockType() {
return lockType;
}
public void setLockType(String lockType) {
this.lockType = lockType;
}
}
| LockInstance |
java | google__dagger | javatests/dagger/functional/factory/SubcomponentFactoryTest.java | {
"start": 1588,
"end": 2094
} | interface ____ {
Sub create(@BindsInstance String s);
}
}
@Test
public void parentComponentWithSubcomponentFactoryEntryPoint() {
ParentWithSubcomponentFactory parent =
DaggerSubcomponentFactoryTest_ParentWithSubcomponentFactory.factory().create(3);
Sub subcomponent = parent.subcomponentFactory().create("foo");
assertThat(subcomponent.i()).isEqualTo(3);
assertThat(subcomponent.s()).isEqualTo("foo");
}
@Module(subcomponents = Sub.class)
abstract static | Factory |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistry.java | {
"start": 4737,
"end": 4957
} | interface ____ {
void invalidate(Collection<String> keys);
void invalidateAll();
default boolean shouldClearOnSecurityIndexStateChange() {
return true;
}
}
}
| CacheInvalidator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/ForUtil.java | {
"start": 900,
"end": 47473
} | class ____ {
public static final int BLOCK_SIZE_SHIFT = 7;
public static final int BLOCK_SIZE = 1 << BLOCK_SIZE_SHIFT;
private static final int BLOCK_SIZE_MASK = BLOCK_SIZE - 1;
private static final ThreadLocal<long[]> scratch = ThreadLocal.withInitial(() -> new long[BLOCK_SIZE / 2]);
private ForUtil() {}
private static long expandMask32(long mask32) {
return mask32 | (mask32 << 32);
}
private static long expandMask16(long mask16) {
return expandMask32(mask16 | (mask16 << 16));
}
private static long expandMask8(long mask8) {
return expandMask16(mask8 | (mask8 << 8));
}
private static long mask32(int bitsPerValue) {
return expandMask32((1L << bitsPerValue) - 1);
}
private static long mask16(int bitsPerValue) {
return expandMask16((1L << bitsPerValue) - 1);
}
private static long mask8(int bitsPerValue) {
return expandMask8((1L << bitsPerValue) - 1);
}
private static void expand8(long[] arr, int offset) {
for (int i = 0; i < 16; ++i) {
long l = arr[i + offset];
arr[i + offset] = (l >>> 56) & 0xFFL;
arr[16 + i + offset] = (l >>> 48) & 0xFFL;
arr[32 + i + offset] = (l >>> 40) & 0xFFL;
arr[48 + i + offset] = (l >>> 32) & 0xFFL;
arr[64 + i + offset] = (l >>> 24) & 0xFFL;
arr[80 + i + offset] = (l >>> 16) & 0xFFL;
arr[96 + i + offset] = (l >>> 8) & 0xFFL;
arr[112 + i + offset] = l & 0xFFL;
}
}
private static void expand8To32(long[] arr, int offset) {
for (int i = 0; i < 16; ++i) {
long l = arr[i + offset];
arr[i + offset] = (l >>> 24) & 0x000000FF000000FFL;
arr[16 + i + offset] = (l >>> 16) & 0x000000FF000000FFL;
arr[32 + i + offset] = (l >>> 8) & 0x000000FF000000FFL;
arr[48 + i + offset] = l & 0x000000FF000000FFL;
}
}
private static void collapse8(long[] arr, int offset) {
for (int i = 0; i < 16; ++i) {
arr[i + offset] = (arr[i + offset] << 56) | (arr[16 + i + offset] << 48) | (arr[32 + i + offset] << 40) | (arr[48 + i + offset]
<< 32) | (arr[64 + i + offset] << 24) | (arr[80 + i + offset] << 16) | (arr[96 + i + offset] << 8) | arr[112 + i + offset];
}
}
private static void expand16(long[] arr, int offset) {
for (int i = 0; i < 32; ++i) {
long l = arr[i + offset];
arr[i + offset] = (l >>> 48) & 0xFFFFL;
arr[32 + i + offset] = (l >>> 32) & 0xFFFFL;
arr[64 + i + offset] = (l >>> 16) & 0xFFFFL;
arr[96 + i + offset] = l & 0xFFFFL;
}
}
private static void expand16To32(long[] arr, int offset) {
for (int i = 0; i < 32; ++i) {
long l = arr[i + offset];
arr[i + offset] = (l >>> 16) & 0x0000FFFF0000FFFFL;
arr[32 + i + offset] = l & 0x0000FFFF0000FFFFL;
}
}
private static void collapse16(long[] arr, int offset) {
for (int i = 0; i < 32; ++i) {
arr[i + offset] = (arr[i + offset] << 48) | (arr[32 + i + offset] << 32) | (arr[64 + i + offset] << 16) | arr[96 + i + offset];
}
}
private static void expand32(long[] arr, int offset) {
for (int i = 0; i < 64; ++i) {
long l = arr[i + offset];
arr[i + offset] = l >>> 32;
arr[64 + i + offset] = l & 0xFFFFFFFFL;
}
}
private static void collapse32(long[] arr, int offset) {
for (int i = 0; i < 64; ++i) {
arr[i + offset] = (arr[i + offset] << 32) | arr[64 + i + offset];
}
}
/** Encode an array of longs into {@code out}. The array size is expected to be a multiple of {@code BLOCK_SIZE}. */
public static void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException {
assert longs.length >= BLOCK_SIZE && (longs.length & BLOCK_SIZE_MASK) == 0
: "expected to get an array that a multiple of " + BLOCK_SIZE + ", got" + longs.length;
for (int i = 0; i < longs.length >> BLOCK_SIZE_SHIFT; i++) {
encode(longs, BLOCK_SIZE * i, bitsPerValue, out);
}
}
private static void encode(long[] longs, int offset, int bitsPerValue, DataOutput out) throws IOException {
final int nextPrimitive;
final int numLongs;
if (bitsPerValue <= 8) {
nextPrimitive = 8;
numLongs = BLOCK_SIZE / 8;
collapse8(longs, offset);
} else if (bitsPerValue <= 16) {
nextPrimitive = 16;
numLongs = BLOCK_SIZE / 4;
collapse16(longs, offset);
} else {
nextPrimitive = 32;
numLongs = BLOCK_SIZE / 2;
collapse32(longs, offset);
}
final int numLongsPerShift = bitsPerValue * 2;
int idx = offset;
int shift = nextPrimitive - bitsPerValue;
final long[] tmp = scratch.get();
for (int i = 0; i < numLongsPerShift; ++i) {
tmp[i] = longs[idx++] << shift;
}
for (shift = shift - bitsPerValue; shift >= 0; shift -= bitsPerValue) {
for (int i = 0; i < numLongsPerShift; ++i) {
tmp[i] |= longs[idx++] << shift;
}
}
final int remainingBitsPerLong = shift + bitsPerValue;
final long maskRemainingBitsPerLong;
if (nextPrimitive == 8) {
maskRemainingBitsPerLong = MASKS8[remainingBitsPerLong];
} else if (nextPrimitive == 16) {
maskRemainingBitsPerLong = MASKS16[remainingBitsPerLong];
} else {
maskRemainingBitsPerLong = MASKS32[remainingBitsPerLong];
}
int tmpIdx = 0;
int remainingBitsPerValue = bitsPerValue;
while (idx < numLongs + offset) {
if (remainingBitsPerValue >= remainingBitsPerLong) {
remainingBitsPerValue -= remainingBitsPerLong;
tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & maskRemainingBitsPerLong;
if (remainingBitsPerValue == 0) {
idx++;
remainingBitsPerValue = bitsPerValue;
}
} else {
final long mask1, mask2;
if (nextPrimitive == 8) {
mask1 = MASKS8[remainingBitsPerValue];
mask2 = MASKS8[remainingBitsPerLong - remainingBitsPerValue];
} else if (nextPrimitive == 16) {
mask1 = MASKS16[remainingBitsPerValue];
mask2 = MASKS16[remainingBitsPerLong - remainingBitsPerValue];
} else {
mask1 = MASKS32[remainingBitsPerValue];
mask2 = MASKS32[remainingBitsPerLong - remainingBitsPerValue];
}
tmp[tmpIdx] |= (longs[idx++] & mask1) << (remainingBitsPerLong - remainingBitsPerValue);
remainingBitsPerValue = bitsPerValue - remainingBitsPerLong + remainingBitsPerValue;
tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & mask2;
}
}
for (int i = 0; i < numLongsPerShift; ++i) {
out.writeLong(tmp[i]);
}
}
/** Number of bytes required to encode an array of {@code BLOCK_SIZE} longs with {@code bitsPerValue} bits per value. */
public static int numBytes(int bitsPerValue) {
return bitsPerValue << (BLOCK_SIZE_SHIFT - 3);
}
private static void decodeSlow(int bitsPerValue, DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
final int numLongs = bitsPerValue << 1;
in.readLongs(tmp, 0, numLongs);
final long mask = MASKS32[bitsPerValue];
int longsIdx = offset;
int shift = 32 - bitsPerValue;
for (; shift >= 0; shift -= bitsPerValue) {
shiftLongs(tmp, numLongs, longs, longsIdx, shift, mask);
longsIdx += numLongs;
}
final int remainingBitsPerLong = shift + bitsPerValue;
final long mask32RemainingBitsPerLong = MASKS32[remainingBitsPerLong];
int tmpIdx = 0;
int remainingBits = remainingBitsPerLong;
for (; (longsIdx & BLOCK_SIZE_MASK) < BLOCK_SIZE / 2; ++longsIdx) {
int b = bitsPerValue - remainingBits;
long l = (tmp[tmpIdx++] & MASKS32[remainingBits]) << b;
while (b >= remainingBitsPerLong) {
b -= remainingBitsPerLong;
l |= (tmp[tmpIdx++] & mask32RemainingBitsPerLong) << b;
}
if (b > 0) {
l |= (tmp[tmpIdx] >>> (remainingBitsPerLong - b)) & MASKS32[b];
remainingBits = remainingBitsPerLong - b;
} else {
remainingBits = remainingBitsPerLong;
}
longs[longsIdx] = l;
}
}
/**
* The pattern that this shiftLongs method applies is recognized by the C2 compiler, which
* generates SIMD instructions for it in order to shift multiple longs at once.
*/
private static void shiftLongs(long[] a, int count, long[] b, int bi, int shift, long mask) {
for (int i = 0; i < count; ++i) {
b[bi + i] = (a[i] >>> shift) & mask;
}
}
private static final long[] MASKS8 = new long[8];
private static final long[] MASKS16 = new long[16];
private static final long[] MASKS32 = new long[32];
static {
for (int i = 0; i < 8; ++i) {
MASKS8[i] = mask8(i);
}
for (int i = 0; i < 16; ++i) {
MASKS16[i] = mask16(i);
}
for (int i = 0; i < 32; ++i) {
MASKS32[i] = mask32(i);
}
}
// mark values in array as final longs to avoid the cost of reading array, arrays should only be
// used when the idx is a variable
private static final long MASK8_1 = MASKS8[1];
private static final long MASK8_2 = MASKS8[2];
private static final long MASK8_3 = MASKS8[3];
private static final long MASK8_4 = MASKS8[4];
private static final long MASK8_5 = MASKS8[5];
private static final long MASK8_6 = MASKS8[6];
private static final long MASK8_7 = MASKS8[7];
private static final long MASK16_1 = MASKS16[1];
private static final long MASK16_2 = MASKS16[2];
private static final long MASK16_3 = MASKS16[3];
private static final long MASK16_4 = MASKS16[4];
private static final long MASK16_5 = MASKS16[5];
private static final long MASK16_6 = MASKS16[6];
private static final long MASK16_7 = MASKS16[7];
private static final long MASK16_9 = MASKS16[9];
private static final long MASK16_10 = MASKS16[10];
private static final long MASK16_11 = MASKS16[11];
private static final long MASK16_12 = MASKS16[12];
private static final long MASK16_13 = MASKS16[13];
private static final long MASK16_14 = MASKS16[14];
private static final long MASK16_15 = MASKS16[15];
private static final long MASK32_1 = MASKS32[1];
private static final long MASK32_2 = MASKS32[2];
private static final long MASK32_3 = MASKS32[3];
private static final long MASK32_4 = MASKS32[4];
private static final long MASK32_5 = MASKS32[5];
private static final long MASK32_6 = MASKS32[6];
private static final long MASK32_7 = MASKS32[7];
private static final long MASK32_8 = MASKS32[8];
private static final long MASK32_9 = MASKS32[9];
private static final long MASK32_10 = MASKS32[10];
private static final long MASK32_11 = MASKS32[11];
private static final long MASK32_12 = MASKS32[12];
private static final long MASK32_13 = MASKS32[13];
private static final long MASK32_14 = MASKS32[14];
private static final long MASK32_15 = MASKS32[15];
private static final long MASK32_17 = MASKS32[17];
private static final long MASK32_18 = MASKS32[18];
private static final long MASK32_19 = MASKS32[19];
private static final long MASK32_20 = MASKS32[20];
private static final long MASK32_21 = MASKS32[21];
private static final long MASK32_22 = MASKS32[22];
private static final long MASK32_23 = MASKS32[23];
private static final long MASK32_24 = MASKS32[24];
/** Decode an encoded input stream into an array of longs. The array size is expected to be a multiple of {@code BLOCK_SIZE}. */
public static void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException {
int alignedSize = longs.length;
alignedSize = (alignedSize & ~1); // Postings reader uses a buffer with size (BLOCK_SIZE + 1).
assert alignedSize >= BLOCK_SIZE && (alignedSize & BLOCK_SIZE_MASK) == 0
: "expected to get an array that a multiple of " + BLOCK_SIZE + ", got" + alignedSize;
for (int i = 0; i < alignedSize >> BLOCK_SIZE_SHIFT; i++) {
decode(bitsPerValue, in, longs, BLOCK_SIZE * i);
}
}
private static void decode(int bitsPerValue, DataInput in, long[] longs, int offset) throws IOException {
final long[] tmp = scratch.get();
switch (bitsPerValue) {
case 1:
decode1(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 2:
decode2(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 3:
decode3(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 4:
decode4(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 5:
decode5(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 6:
decode6(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 7:
decode7(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 8:
decode8(in, tmp, longs, offset);
expand8(longs, offset);
break;
case 9:
decode9(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 10:
decode10(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 11:
decode11(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 12:
decode12(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 13:
decode13(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 14:
decode14(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 15:
decode15(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 16:
decode16(in, tmp, longs, offset);
expand16(longs, offset);
break;
case 17:
decode17(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 18:
decode18(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 19:
decode19(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 20:
decode20(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 21:
decode21(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 22:
decode22(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 23:
decode23(in, tmp, longs, offset);
expand32(longs, offset);
break;
case 24:
decode24(in, tmp, longs, offset);
expand32(longs, offset);
break;
default:
decodeSlow(bitsPerValue, in, tmp, longs, offset);
expand32(longs, offset);
break;
}
}
/**
* Decodes an encoded input stream into an array of longs, such that each long contains two values, each represented with 32 bits.
* Values [0..63] are encoded in the high-order bits of {@code longs} [0..63], and values [64..127] are encoded in the low-order
* bits of {@code longs} [0..63]. This representation allows subsequent operations to be performed on two values at a time.
* The size of the output array is expected to be a multiple of {@code BLOCK_SIZE}.
*/
public static void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException {
int alignedSize = longs.length;
alignedSize = (alignedSize & ~1); // Postings reader uses a buffer with size (BLOCK_SIZE + 1).
assert alignedSize >= BLOCK_SIZE && (alignedSize & BLOCK_SIZE_MASK) == 0
: "expected to get an array that a multiple of " + BLOCK_SIZE + ", got " + alignedSize;
for (int i = 0; i < alignedSize >> BLOCK_SIZE_SHIFT; ++i) {
decodeTo32(bitsPerValue, in, longs, BLOCK_SIZE * i);
}
}
private static void decodeTo32(int bitsPerValue, DataInput in, long[] longs, int offset) throws IOException {
final long[] tmp = scratch.get();
switch (bitsPerValue) {
case 1:
decode1(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 2:
decode2(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 3:
decode3(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 4:
decode4(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 5:
decode5(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 6:
decode6(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 7:
decode7(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 8:
decode8(in, tmp, longs, offset);
expand8To32(longs, offset);
break;
case 9:
decode9(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 10:
decode10(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 11:
decode11(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 12:
decode12(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 13:
decode13(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 14:
decode14(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 15:
decode15(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 16:
decode16(in, tmp, longs, offset);
expand16To32(longs, offset);
break;
case 17:
decode17(in, tmp, longs, offset);
break;
case 18:
decode18(in, tmp, longs, offset);
break;
case 19:
decode19(in, tmp, longs, offset);
break;
case 20:
decode20(in, tmp, longs, offset);
break;
case 21:
decode21(in, tmp, longs, offset);
break;
case 22:
decode22(in, tmp, longs, offset);
break;
case 23:
decode23(in, tmp, longs, offset);
break;
case 24:
decode24(in, tmp, longs, offset);
break;
default:
decodeSlow(bitsPerValue, in, tmp, longs, offset);
break;
}
}
private static void decode1(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 2);
shiftLongs(tmp, 2, longs, offset, 7, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 2, 6, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 4, 5, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 6, 4, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 8, 3, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 10, 2, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 12, 1, MASK8_1);
shiftLongs(tmp, 2, longs, offset + 14, 0, MASK8_1);
}
private static void decode2(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 4);
shiftLongs(tmp, 4, longs, offset, 6, MASK8_2);
shiftLongs(tmp, 4, longs, offset + 4, 4, MASK8_2);
shiftLongs(tmp, 4, longs, offset + 8, 2, MASK8_2);
shiftLongs(tmp, 4, longs, offset + 12, 0, MASK8_2);
}
private static void decode3(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 6);
shiftLongs(tmp, 6, longs, offset, 5, MASK8_3);
shiftLongs(tmp, 6, longs, offset + 6, 2, MASK8_3);
for (int iter = 0, tmpIdx = 0, longsIdx = 12 + offset; iter < 2; ++iter, tmpIdx += 3, longsIdx += 2) {
long l0 = (tmp[tmpIdx + 0] & MASK8_2) << 1;
l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK8_1;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 2;
l1 |= (tmp[tmpIdx + 2] & MASK8_2) << 0;
longs[longsIdx + 1] = l1;
}
}
private static void decode4(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 8);
shiftLongs(tmp, 8, longs, offset, 4, MASK8_4);
shiftLongs(tmp, 8, longs, offset + 8, 0, MASK8_4);
}
private static void decode5(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 10);
shiftLongs(tmp, 10, longs, offset, 3, MASK8_5);
for (int iter = 0, tmpIdx = 0, longsIdx = 10 + offset; iter < 2; ++iter, tmpIdx += 5, longsIdx += 3) {
long l0 = (tmp[tmpIdx + 0] & MASK8_3) << 2;
l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK8_2;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 4;
l1 |= (tmp[tmpIdx + 2] & MASK8_3) << 1;
l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK8_1;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 3] & MASK8_2) << 3;
l2 |= (tmp[tmpIdx + 4] & MASK8_3) << 0;
longs[longsIdx + 2] = l2;
}
}
private static void decode6(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 12);
shiftLongs(tmp, 12, longs, offset, 2, MASK8_6);
shiftLongs(tmp, 12, tmp, 0, 0, MASK8_2);
for (int iter = 0, tmpIdx = 0, longsIdx = 12 + offset; iter < 4; ++iter, tmpIdx += 3, longsIdx += 1) {
long l0 = tmp[tmpIdx + 0] << 4;
l0 |= tmp[tmpIdx + 1] << 2;
l0 |= tmp[tmpIdx + 2] << 0;
longs[longsIdx + 0] = l0;
}
}
private static void decode7(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 14);
shiftLongs(tmp, 14, longs, offset, 1, MASK8_7);
shiftLongs(tmp, 14, tmp, 0, 0, MASK8_1);
for (int iter = 0, tmpIdx = 0, longsIdx = 14 + offset; iter < 2; ++iter, tmpIdx += 7, longsIdx += 1) {
long l0 = tmp[tmpIdx + 0] << 6;
l0 |= tmp[tmpIdx + 1] << 5;
l0 |= tmp[tmpIdx + 2] << 4;
l0 |= tmp[tmpIdx + 3] << 3;
l0 |= tmp[tmpIdx + 4] << 2;
l0 |= tmp[tmpIdx + 5] << 1;
l0 |= tmp[tmpIdx + 6] << 0;
longs[longsIdx + 0] = l0;
}
}
private static void decode8(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(longs, offset, 16);
}
private static void decode9(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 18);
shiftLongs(tmp, 18, longs, offset, 7, MASK16_9);
for (int iter = 0, tmpIdx = 0, longsIdx = 18 + offset; iter < 2; ++iter, tmpIdx += 9, longsIdx += 7) {
long l0 = (tmp[tmpIdx + 0] & MASK16_7) << 2;
l0 |= (tmp[tmpIdx + 1] >>> 5) & MASK16_2;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK16_5) << 4;
l1 |= (tmp[tmpIdx + 2] >>> 3) & MASK16_4;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 2] & MASK16_3) << 6;
l2 |= (tmp[tmpIdx + 3] >>> 1) & MASK16_6;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 3] & MASK16_1) << 8;
l3 |= (tmp[tmpIdx + 4] & MASK16_7) << 1;
l3 |= (tmp[tmpIdx + 5] >>> 6) & MASK16_1;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 5] & MASK16_6) << 3;
l4 |= (tmp[tmpIdx + 6] >>> 4) & MASK16_3;
longs[longsIdx + 4] = l4;
long l5 = (tmp[tmpIdx + 6] & MASK16_4) << 5;
l5 |= (tmp[tmpIdx + 7] >>> 2) & MASK16_5;
longs[longsIdx + 5] = l5;
long l6 = (tmp[tmpIdx + 7] & MASK16_2) << 7;
l6 |= (tmp[tmpIdx + 8] & MASK16_7) << 0;
longs[longsIdx + 6] = l6;
}
}
private static void decode10(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 20);
shiftLongs(tmp, 20, longs, offset, 6, MASK16_10);
for (int iter = 0, tmpIdx = 0, longsIdx = 20 + offset; iter < 4; ++iter, tmpIdx += 5, longsIdx += 3) {
long l0 = (tmp[tmpIdx + 0] & MASK16_6) << 4;
l0 |= (tmp[tmpIdx + 1] >>> 2) & MASK16_4;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK16_2) << 8;
l1 |= (tmp[tmpIdx + 2] & MASK16_6) << 2;
l1 |= (tmp[tmpIdx + 3] >>> 4) & MASK16_2;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 3] & MASK16_4) << 6;
l2 |= (tmp[tmpIdx + 4] & MASK16_6) << 0;
longs[longsIdx + 2] = l2;
}
}
private static void decode11(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 22);
shiftLongs(tmp, 22, longs, offset, 5, MASK16_11);
for (int iter = 0, tmpIdx = 0, longsIdx = 22 + offset; iter < 2; ++iter, tmpIdx += 11, longsIdx += 5) {
long l0 = (tmp[tmpIdx + 0] & MASK16_5) << 6;
l0 |= (tmp[tmpIdx + 1] & MASK16_5) << 1;
l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK16_1;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 2] & MASK16_4) << 7;
l1 |= (tmp[tmpIdx + 3] & MASK16_5) << 2;
l1 |= (tmp[tmpIdx + 4] >>> 3) & MASK16_2;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 4] & MASK16_3) << 8;
l2 |= (tmp[tmpIdx + 5] & MASK16_5) << 3;
l2 |= (tmp[tmpIdx + 6] >>> 2) & MASK16_3;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 6] & MASK16_2) << 9;
l3 |= (tmp[tmpIdx + 7] & MASK16_5) << 4;
l3 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_4;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 8] & MASK16_1) << 10;
l4 |= (tmp[tmpIdx + 9] & MASK16_5) << 5;
l4 |= (tmp[tmpIdx + 10] & MASK16_5) << 0;
longs[longsIdx + 4] = l4;
}
}
private static void decode12(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 24);
shiftLongs(tmp, 24, longs, offset, 4, MASK16_12);
shiftLongs(tmp, 24, tmp, 0, 0, MASK16_4);
for (int iter = 0, tmpIdx = 0, longsIdx = 24 + offset; iter < 8; ++iter, tmpIdx += 3, longsIdx += 1) {
long l0 = tmp[tmpIdx + 0] << 8;
l0 |= tmp[tmpIdx + 1] << 4;
l0 |= tmp[tmpIdx + 2] << 0;
longs[longsIdx + 0] = l0;
}
}
private static void decode13(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 26);
shiftLongs(tmp, 26, longs, offset, 3, MASK16_13);
for (int iter = 0, tmpIdx = 0, longsIdx = 26 + offset; iter < 2; ++iter, tmpIdx += 13, longsIdx += 3) {
long l0 = (tmp[tmpIdx + 0] & MASK16_3) << 10;
l0 |= (tmp[tmpIdx + 1] & MASK16_3) << 7;
l0 |= (tmp[tmpIdx + 2] & MASK16_3) << 4;
l0 |= (tmp[tmpIdx + 3] & MASK16_3) << 1;
l0 |= (tmp[tmpIdx + 4] >>> 2) & MASK16_1;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 4] & MASK16_2) << 11;
l1 |= (tmp[tmpIdx + 5] & MASK16_3) << 8;
l1 |= (tmp[tmpIdx + 6] & MASK16_3) << 5;
l1 |= (tmp[tmpIdx + 7] & MASK16_3) << 2;
l1 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_2;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 8] & MASK16_1) << 12;
l2 |= (tmp[tmpIdx + 9] & MASK16_3) << 9;
l2 |= (tmp[tmpIdx + 10] & MASK16_3) << 6;
l2 |= (tmp[tmpIdx + 11] & MASK16_3) << 3;
l2 |= (tmp[tmpIdx + 12] & MASK16_3) << 0;
longs[longsIdx + 2] = l2;
}
}
private static void decode14(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 28);
shiftLongs(tmp, 28, longs, offset, 2, MASK16_14);
shiftLongs(tmp, 28, tmp, 0, 0, MASK16_2);
for (int iter = 0, tmpIdx = 0, longsIdx = 28 + offset; iter < 4; ++iter, tmpIdx += 7, longsIdx += 1) {
long l0 = tmp[tmpIdx + 0] << 12;
l0 |= tmp[tmpIdx + 1] << 10;
l0 |= tmp[tmpIdx + 2] << 8;
l0 |= tmp[tmpIdx + 3] << 6;
l0 |= tmp[tmpIdx + 4] << 4;
l0 |= tmp[tmpIdx + 5] << 2;
l0 |= tmp[tmpIdx + 6] << 0;
longs[longsIdx + 0] = l0;
}
}
private static void decode15(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 30);
shiftLongs(tmp, 30, longs, offset, 1, MASK16_15);
shiftLongs(tmp, 30, tmp, 0, 0, MASK16_1);
for (int iter = 0, tmpIdx = 0, longsIdx = 30 + offset; iter < 2; ++iter, tmpIdx += 15, longsIdx += 1) {
long l0 = tmp[tmpIdx + 0] << 14;
l0 |= tmp[tmpIdx + 1] << 13;
l0 |= tmp[tmpIdx + 2] << 12;
l0 |= tmp[tmpIdx + 3] << 11;
l0 |= tmp[tmpIdx + 4] << 10;
l0 |= tmp[tmpIdx + 5] << 9;
l0 |= tmp[tmpIdx + 6] << 8;
l0 |= tmp[tmpIdx + 7] << 7;
l0 |= tmp[tmpIdx + 8] << 6;
l0 |= tmp[tmpIdx + 9] << 5;
l0 |= tmp[tmpIdx + 10] << 4;
l0 |= tmp[tmpIdx + 11] << 3;
l0 |= tmp[tmpIdx + 12] << 2;
l0 |= tmp[tmpIdx + 13] << 1;
l0 |= tmp[tmpIdx + 14] << 0;
longs[longsIdx + 0] = l0;
}
}
private static void decode16(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(longs, offset, 32);
}
private static void decode17(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 34);
shiftLongs(tmp, 34, longs, offset, 15, MASK32_17);
for (int iter = 0, tmpIdx = 0, longsIdx = 34 + offset; iter < 2; ++iter, tmpIdx += 17, longsIdx += 15) {
long l0 = (tmp[tmpIdx + 0] & MASK32_15) << 2;
l0 |= (tmp[tmpIdx + 1] >>> 13) & MASK32_2;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK32_13) << 4;
l1 |= (tmp[tmpIdx + 2] >>> 11) & MASK32_4;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 2] & MASK32_11) << 6;
l2 |= (tmp[tmpIdx + 3] >>> 9) & MASK32_6;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 3] & MASK32_9) << 8;
l3 |= (tmp[tmpIdx + 4] >>> 7) & MASK32_8;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 4] & MASK32_7) << 10;
l4 |= (tmp[tmpIdx + 5] >>> 5) & MASK32_10;
longs[longsIdx + 4] = l4;
long l5 = (tmp[tmpIdx + 5] & MASK32_5) << 12;
l5 |= (tmp[tmpIdx + 6] >>> 3) & MASK32_12;
longs[longsIdx + 5] = l5;
long l6 = (tmp[tmpIdx + 6] & MASK32_3) << 14;
l6 |= (tmp[tmpIdx + 7] >>> 1) & MASK32_14;
longs[longsIdx + 6] = l6;
long l7 = (tmp[tmpIdx + 7] & MASK32_1) << 16;
l7 |= (tmp[tmpIdx + 8] & MASK32_15) << 1;
l7 |= (tmp[tmpIdx + 9] >>> 14) & MASK32_1;
longs[longsIdx + 7] = l7;
long l8 = (tmp[tmpIdx + 9] & MASK32_14) << 3;
l8 |= (tmp[tmpIdx + 10] >>> 12) & MASK32_3;
longs[longsIdx + 8] = l8;
long l9 = (tmp[tmpIdx + 10] & MASK32_12) << 5;
l9 |= (tmp[tmpIdx + 11] >>> 10) & MASK32_5;
longs[longsIdx + 9] = l9;
long l10 = (tmp[tmpIdx + 11] & MASK32_10) << 7;
l10 |= (tmp[tmpIdx + 12] >>> 8) & MASK32_7;
longs[longsIdx + 10] = l10;
long l11 = (tmp[tmpIdx + 12] & MASK32_8) << 9;
l11 |= (tmp[tmpIdx + 13] >>> 6) & MASK32_9;
longs[longsIdx + 11] = l11;
long l12 = (tmp[tmpIdx + 13] & MASK32_6) << 11;
l12 |= (tmp[tmpIdx + 14] >>> 4) & MASK32_11;
longs[longsIdx + 12] = l12;
long l13 = (tmp[tmpIdx + 14] & MASK32_4) << 13;
l13 |= (tmp[tmpIdx + 15] >>> 2) & MASK32_13;
longs[longsIdx + 13] = l13;
long l14 = (tmp[tmpIdx + 15] & MASK32_2) << 15;
l14 |= (tmp[tmpIdx + 16] & MASK32_15) << 0;
longs[longsIdx + 14] = l14;
}
}
private static void decode18(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 36);
shiftLongs(tmp, 36, longs, offset, 14, MASK32_18);
for (int iter = 0, tmpIdx = 0, longsIdx = 36 + offset; iter < 4; ++iter, tmpIdx += 9, longsIdx += 7) {
long l0 = (tmp[tmpIdx + 0] & MASK32_14) << 4;
l0 |= (tmp[tmpIdx + 1] >>> 10) & MASK32_4;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK32_10) << 8;
l1 |= (tmp[tmpIdx + 2] >>> 6) & MASK32_8;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 2] & MASK32_6) << 12;
l2 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_12;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 3] & MASK32_2) << 16;
l3 |= (tmp[tmpIdx + 4] & MASK32_14) << 2;
l3 |= (tmp[tmpIdx + 5] >>> 12) & MASK32_2;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 5] & MASK32_12) << 6;
l4 |= (tmp[tmpIdx + 6] >>> 8) & MASK32_6;
longs[longsIdx + 4] = l4;
long l5 = (tmp[tmpIdx + 6] & MASK32_8) << 10;
l5 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_10;
longs[longsIdx + 5] = l5;
long l6 = (tmp[tmpIdx + 7] & MASK32_4) << 14;
l6 |= (tmp[tmpIdx + 8] & MASK32_14) << 0;
longs[longsIdx + 6] = l6;
}
}
private static void decode19(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 38);
shiftLongs(tmp, 38, longs, offset, 13, MASK32_19);
for (int iter = 0, tmpIdx = 0, longsIdx = 38 + offset; iter < 2; ++iter, tmpIdx += 19, longsIdx += 13) {
long l0 = (tmp[tmpIdx + 0] & MASK32_13) << 6;
l0 |= (tmp[tmpIdx + 1] >>> 7) & MASK32_6;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK32_7) << 12;
l1 |= (tmp[tmpIdx + 2] >>> 1) & MASK32_12;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 2] & MASK32_1) << 18;
l2 |= (tmp[tmpIdx + 3] & MASK32_13) << 5;
l2 |= (tmp[tmpIdx + 4] >>> 8) & MASK32_5;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 4] & MASK32_8) << 11;
l3 |= (tmp[tmpIdx + 5] >>> 2) & MASK32_11;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 5] & MASK32_2) << 17;
l4 |= (tmp[tmpIdx + 6] & MASK32_13) << 4;
l4 |= (tmp[tmpIdx + 7] >>> 9) & MASK32_4;
longs[longsIdx + 4] = l4;
long l5 = (tmp[tmpIdx + 7] & MASK32_9) << 10;
l5 |= (tmp[tmpIdx + 8] >>> 3) & MASK32_10;
longs[longsIdx + 5] = l5;
long l6 = (tmp[tmpIdx + 8] & MASK32_3) << 16;
l6 |= (tmp[tmpIdx + 9] & MASK32_13) << 3;
l6 |= (tmp[tmpIdx + 10] >>> 10) & MASK32_3;
longs[longsIdx + 6] = l6;
long l7 = (tmp[tmpIdx + 10] & MASK32_10) << 9;
l7 |= (tmp[tmpIdx + 11] >>> 4) & MASK32_9;
longs[longsIdx + 7] = l7;
long l8 = (tmp[tmpIdx + 11] & MASK32_4) << 15;
l8 |= (tmp[tmpIdx + 12] & MASK32_13) << 2;
l8 |= (tmp[tmpIdx + 13] >>> 11) & MASK32_2;
longs[longsIdx + 8] = l8;
long l9 = (tmp[tmpIdx + 13] & MASK32_11) << 8;
l9 |= (tmp[tmpIdx + 14] >>> 5) & MASK32_8;
longs[longsIdx + 9] = l9;
long l10 = (tmp[tmpIdx + 14] & MASK32_5) << 14;
l10 |= (tmp[tmpIdx + 15] & MASK32_13) << 1;
l10 |= (tmp[tmpIdx + 16] >>> 12) & MASK32_1;
longs[longsIdx + 10] = l10;
long l11 = (tmp[tmpIdx + 16] & MASK32_12) << 7;
l11 |= (tmp[tmpIdx + 17] >>> 6) & MASK32_7;
longs[longsIdx + 11] = l11;
long l12 = (tmp[tmpIdx + 17] & MASK32_6) << 13;
l12 |= (tmp[tmpIdx + 18] & MASK32_13) << 0;
longs[longsIdx + 12] = l12;
}
}
private static void decode20(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 40);
shiftLongs(tmp, 40, longs, offset, 12, MASK32_20);
for (int iter = 0, tmpIdx = 0, longsIdx = 40 + offset; iter < 8; ++iter, tmpIdx += 5, longsIdx += 3) {
long l0 = (tmp[tmpIdx + 0] & MASK32_12) << 8;
l0 |= (tmp[tmpIdx + 1] >>> 4) & MASK32_8;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK32_4) << 16;
l1 |= (tmp[tmpIdx + 2] & MASK32_12) << 4;
l1 |= (tmp[tmpIdx + 3] >>> 8) & MASK32_4;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 3] & MASK32_8) << 12;
l2 |= (tmp[tmpIdx + 4] & MASK32_12) << 0;
longs[longsIdx + 2] = l2;
}
}
private static void decode21(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 42);
shiftLongs(tmp, 42, longs, offset, 11, MASK32_21);
for (int iter = 0, tmpIdx = 0, longsIdx = 42 + offset; iter < 2; ++iter, tmpIdx += 21, longsIdx += 11) {
long l0 = (tmp[tmpIdx + 0] & MASK32_11) << 10;
l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK32_10;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 1] & MASK32_1) << 20;
l1 |= (tmp[tmpIdx + 2] & MASK32_11) << 9;
l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_9;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 3] & MASK32_2) << 19;
l2 |= (tmp[tmpIdx + 4] & MASK32_11) << 8;
l2 |= (tmp[tmpIdx + 5] >>> 3) & MASK32_8;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 5] & MASK32_3) << 18;
l3 |= (tmp[tmpIdx + 6] & MASK32_11) << 7;
l3 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_7;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 7] & MASK32_4) << 17;
l4 |= (tmp[tmpIdx + 8] & MASK32_11) << 6;
l4 |= (tmp[tmpIdx + 9] >>> 5) & MASK32_6;
longs[longsIdx + 4] = l4;
long l5 = (tmp[tmpIdx + 9] & MASK32_5) << 16;
l5 |= (tmp[tmpIdx + 10] & MASK32_11) << 5;
l5 |= (tmp[tmpIdx + 11] >>> 6) & MASK32_5;
longs[longsIdx + 5] = l5;
long l6 = (tmp[tmpIdx + 11] & MASK32_6) << 15;
l6 |= (tmp[tmpIdx + 12] & MASK32_11) << 4;
l6 |= (tmp[tmpIdx + 13] >>> 7) & MASK32_4;
longs[longsIdx + 6] = l6;
long l7 = (tmp[tmpIdx + 13] & MASK32_7) << 14;
l7 |= (tmp[tmpIdx + 14] & MASK32_11) << 3;
l7 |= (tmp[tmpIdx + 15] >>> 8) & MASK32_3;
longs[longsIdx + 7] = l7;
long l8 = (tmp[tmpIdx + 15] & MASK32_8) << 13;
l8 |= (tmp[tmpIdx + 16] & MASK32_11) << 2;
l8 |= (tmp[tmpIdx + 17] >>> 9) & MASK32_2;
longs[longsIdx + 8] = l8;
long l9 = (tmp[tmpIdx + 17] & MASK32_9) << 12;
l9 |= (tmp[tmpIdx + 18] & MASK32_11) << 1;
l9 |= (tmp[tmpIdx + 19] >>> 10) & MASK32_1;
longs[longsIdx + 9] = l9;
long l10 = (tmp[tmpIdx + 19] & MASK32_10) << 11;
l10 |= (tmp[tmpIdx + 20] & MASK32_11) << 0;
longs[longsIdx + 10] = l10;
}
}
private static void decode22(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 44);
shiftLongs(tmp, 44, longs, offset, 10, MASK32_22);
for (int iter = 0, tmpIdx = 0, longsIdx = 44 + offset; iter < 4; ++iter, tmpIdx += 11, longsIdx += 5) {
long l0 = (tmp[tmpIdx + 0] & MASK32_10) << 12;
l0 |= (tmp[tmpIdx + 1] & MASK32_10) << 2;
l0 |= (tmp[tmpIdx + 2] >>> 8) & MASK32_2;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 2] & MASK32_8) << 14;
l1 |= (tmp[tmpIdx + 3] & MASK32_10) << 4;
l1 |= (tmp[tmpIdx + 4] >>> 6) & MASK32_4;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 4] & MASK32_6) << 16;
l2 |= (tmp[tmpIdx + 5] & MASK32_10) << 6;
l2 |= (tmp[tmpIdx + 6] >>> 4) & MASK32_6;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 6] & MASK32_4) << 18;
l3 |= (tmp[tmpIdx + 7] & MASK32_10) << 8;
l3 |= (tmp[tmpIdx + 8] >>> 2) & MASK32_8;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 8] & MASK32_2) << 20;
l4 |= (tmp[tmpIdx + 9] & MASK32_10) << 10;
l4 |= (tmp[tmpIdx + 10] & MASK32_10) << 0;
longs[longsIdx + 4] = l4;
}
}
private static void decode23(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 46);
shiftLongs(tmp, 46, longs, offset, 9, MASK32_23);
for (int iter = 0, tmpIdx = 0, longsIdx = 46 + offset; iter < 2; ++iter, tmpIdx += 23, longsIdx += 9) {
long l0 = (tmp[tmpIdx + 0] & MASK32_9) << 14;
l0 |= (tmp[tmpIdx + 1] & MASK32_9) << 5;
l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK32_5;
longs[longsIdx + 0] = l0;
long l1 = (tmp[tmpIdx + 2] & MASK32_4) << 19;
l1 |= (tmp[tmpIdx + 3] & MASK32_9) << 10;
l1 |= (tmp[tmpIdx + 4] & MASK32_9) << 1;
l1 |= (tmp[tmpIdx + 5] >>> 8) & MASK32_1;
longs[longsIdx + 1] = l1;
long l2 = (tmp[tmpIdx + 5] & MASK32_8) << 15;
l2 |= (tmp[tmpIdx + 6] & MASK32_9) << 6;
l2 |= (tmp[tmpIdx + 7] >>> 3) & MASK32_6;
longs[longsIdx + 2] = l2;
long l3 = (tmp[tmpIdx + 7] & MASK32_3) << 20;
l3 |= (tmp[tmpIdx + 8] & MASK32_9) << 11;
l3 |= (tmp[tmpIdx + 9] & MASK32_9) << 2;
l3 |= (tmp[tmpIdx + 10] >>> 7) & MASK32_2;
longs[longsIdx + 3] = l3;
long l4 = (tmp[tmpIdx + 10] & MASK32_7) << 16;
l4 |= (tmp[tmpIdx + 11] & MASK32_9) << 7;
l4 |= (tmp[tmpIdx + 12] >>> 2) & MASK32_7;
longs[longsIdx + 4] = l4;
long l5 = (tmp[tmpIdx + 12] & MASK32_2) << 21;
l5 |= (tmp[tmpIdx + 13] & MASK32_9) << 12;
l5 |= (tmp[tmpIdx + 14] & MASK32_9) << 3;
l5 |= (tmp[tmpIdx + 15] >>> 6) & MASK32_3;
longs[longsIdx + 5] = l5;
long l6 = (tmp[tmpIdx + 15] & MASK32_6) << 17;
l6 |= (tmp[tmpIdx + 16] & MASK32_9) << 8;
l6 |= (tmp[tmpIdx + 17] >>> 1) & MASK32_8;
longs[longsIdx + 6] = l6;
long l7 = (tmp[tmpIdx + 17] & MASK32_1) << 22;
l7 |= (tmp[tmpIdx + 18] & MASK32_9) << 13;
l7 |= (tmp[tmpIdx + 19] & MASK32_9) << 4;
l7 |= (tmp[tmpIdx + 20] >>> 5) & MASK32_4;
longs[longsIdx + 7] = l7;
long l8 = (tmp[tmpIdx + 20] & MASK32_5) << 18;
l8 |= (tmp[tmpIdx + 21] & MASK32_9) << 9;
l8 |= (tmp[tmpIdx + 22] & MASK32_9) << 0;
longs[longsIdx + 8] = l8;
}
}
private static void decode24(DataInput in, long[] tmp, long[] longs, int offset) throws IOException {
in.readLongs(tmp, 0, 48);
shiftLongs(tmp, 48, longs, offset, 8, MASK32_24);
shiftLongs(tmp, 48, tmp, 0, 0, MASK32_8);
for (int iter = 0, tmpIdx = 0, longsIdx = 48 + offset; iter < 16; ++iter, tmpIdx += 3, longsIdx += 1) {
long l0 = tmp[tmpIdx + 0] << 16;
l0 |= tmp[tmpIdx + 1] << 8;
l0 |= tmp[tmpIdx + 2] << 0;
longs[longsIdx + 0] = l0;
}
}
}
| ForUtil |
java | google__guice | core/test/com/google/inject/OptionalBindingTest.java | {
"start": 9469,
"end": 9550
} | class ____ {
@Inject HasOptionalInjections hasOptionalInjections;
}
| Indirect |
java | grpc__grpc-java | api/src/main/java/io/grpc/InternalServer.java | {
"start": 864,
"end": 1041
} | class ____ {
public static final Context.Key<Server> SERVER_CONTEXT_KEY = Server.SERVER_CONTEXT_KEY;
// Prevent instantiation.
private InternalServer() {
}
}
| InternalServer |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/integration/multiple/exportprovider/MultipleRegistryCenterExportProviderRegistryProtocolListener.java | {
"start": 1400,
"end": 2387
} | class ____ implements RegistryProtocolListener {
private boolean exported = false;
/**
* {@inheritDoc}
*/
@Override
public void onExport(RegistryProtocol registryProtocol, Exporter<?> exporter) {
if (registryProtocol instanceof InterfaceCompatibleRegistryProtocol
&& exporter != null
&& exporter.getInvoker() != null
&& exporter.getInvoker().getInterface().equals(MultipleRegistryCenterExportProviderService.class)) {
this.exported = true;
}
}
/**
* {@inheritDoc}
*/
@Override
public void onRefer(RegistryProtocol registryProtocol, ClusterInvoker<?> invoker, URL url, URL registryURL) {}
/**
* {@inheritDoc}
*/
@Override
public void onDestroy() {}
/**
* Returns if this exporter is exported.
*/
public boolean isExported() {
return exported;
}
}
| MultipleRegistryCenterExportProviderRegistryProtocolListener |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-proxyexchange-webmvc/src/test/java/org/springframework/cloud/gateway/mvc/GetWithBodyRequestTests.java | {
"start": 3003,
"end": 4202
} | class ____ {
@Autowired
private TestRestTemplate rest;
@Autowired
private TestApplication testApplication;
@LocalServerPort
private int port;
@BeforeEach
public void init() throws Exception {
testApplication.setHome(new URI("http://localhost:" + port));
rest.getRestTemplate().setRequestFactory(new GetWithBodyRequestClientHttpRequestFactory());
}
@Test
public void get() {
assertThat(rest.getForObject("/proxy/0", Foo.class).getName()).isEqualTo("bye");
}
@Test
public void getWithBodyRequest() {
final HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
final Foo bodyRequest = new Foo("hello");
final HttpEntity<Foo> entity = new HttpEntity<>(bodyRequest, headers);
final ResponseEntity<Foo> response = rest.exchange("/proxy/get-with-body-request", HttpMethod.GET, entity,
Foo.class);
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(response.getBody()).isInstanceOfSatisfying(Foo.class,
foo -> assertThat(foo.getName()).isEqualTo("hello world"));
}
@SpringBootApplication
static | GetWithBodyRequestTests |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/EnableConfigurationPropertiesRegistrarTests.java | {
"start": 1500,
"end": 4186
} | class ____ {
private DefaultListableBeanFactory beanFactory;
private EnableConfigurationPropertiesRegistrar registrar;
@BeforeEach
void setup() {
this.beanFactory = spy(new DefaultListableBeanFactory());
this.registrar = new EnableConfigurationPropertiesRegistrar();
}
@Test
void typeWithDefaultConstructorShouldRegisterRootBeanDefinition() {
register(TestConfiguration.class);
BeanDefinition definition = this.beanFactory
.getBeanDefinition("foo-" + getClass().getName() + "$FooProperties");
assertThat(definition).satisfies(hasBindMethod(BindMethod.JAVA_BEAN));
}
@Test
void constructorBoundPropertiesShouldRegisterConfigurationPropertiesBeanDefinition() {
register(TestConfiguration.class);
BeanDefinition definition = this.beanFactory
.getBeanDefinition("bar-" + getClass().getName() + "$BarProperties");
assertThat(definition).satisfies(hasBindMethod(BindMethod.VALUE_OBJECT));
}
@Test
void typeWithMultipleConstructorsShouldRegisterGenericBeanDefinition() {
register(TestConfiguration.class);
BeanDefinition definition = this.beanFactory
.getBeanDefinition("bing-" + getClass().getName() + "$BingProperties");
assertThat(definition).satisfies(hasBindMethod(BindMethod.JAVA_BEAN));
}
@Test
void typeWithNoAnnotationShouldFail() {
assertThatIllegalStateException().isThrownBy(() -> register(InvalidConfiguration.class))
.withMessageContaining("No ConfigurationProperties annotation found")
.withMessageContaining(EnableConfigurationPropertiesRegistrar.class.getName());
}
@Test
void registrationWithDuplicatedTypeShouldRegisterSingleBeanDefinition() {
register(DuplicateConfiguration.class);
String name = "foo-" + getClass().getName() + "$FooProperties";
then(this.beanFactory).should().registerBeanDefinition(eq(name), any());
}
@Test
void registrationWithNoTypeShouldNotRegisterAnything() {
register(EmptyConfiguration.class);
String[] names = this.beanFactory.getBeanNamesForType(Object.class);
for (String name : names) {
assertThat(name).doesNotContain("-");
}
}
private Consumer<BeanDefinition> hasBindMethod(BindMethod bindMethod) {
return (definition) -> {
assertThat(definition.hasAttribute(BindMethod.class.getName())).isTrue();
assertThat(definition.getAttribute(BindMethod.class.getName())).isEqualTo(bindMethod);
};
}
private void register(Class<?> configuration) {
AnnotationMetadata metadata = AnnotationMetadata.introspect(configuration);
this.registrar.registerBeanDefinitions(metadata, this.beanFactory);
}
@EnableConfigurationProperties({ FooProperties.class, BarProperties.class, BingProperties.class })
static | EnableConfigurationPropertiesRegistrarTests |
java | grpc__grpc-java | rls/src/main/java/io/grpc/rls/LbPolicyConfiguration.java | {
"start": 10579,
"end": 10712
} | class ____ child load balancing policy with associated helper /
* utility classes to manage the child policy.
*/
static final | for |
java | spring-projects__spring-boot | module/spring-boot-data-rest/src/main/java/org/springframework/boot/data/rest/autoconfigure/DataRestProperties.java | {
"start": 2562,
"end": 5755
} | enum ____ translation through the Spring Data REST default
* resource bundle.
*/
private @Nullable Boolean enableEnumTranslation;
public @Nullable String getBasePath() {
return this.basePath;
}
public void setBasePath(@Nullable String basePath) {
this.basePath = basePath;
}
public @Nullable Integer getDefaultPageSize() {
return this.defaultPageSize;
}
public void setDefaultPageSize(@Nullable Integer defaultPageSize) {
this.defaultPageSize = defaultPageSize;
}
public @Nullable Integer getMaxPageSize() {
return this.maxPageSize;
}
public void setMaxPageSize(@Nullable Integer maxPageSize) {
this.maxPageSize = maxPageSize;
}
public @Nullable String getPageParamName() {
return this.pageParamName;
}
public void setPageParamName(@Nullable String pageParamName) {
this.pageParamName = pageParamName;
}
public @Nullable String getLimitParamName() {
return this.limitParamName;
}
public void setLimitParamName(@Nullable String limitParamName) {
this.limitParamName = limitParamName;
}
public @Nullable String getSortParamName() {
return this.sortParamName;
}
public void setSortParamName(@Nullable String sortParamName) {
this.sortParamName = sortParamName;
}
public RepositoryDetectionStrategies getDetectionStrategy() {
return this.detectionStrategy;
}
public void setDetectionStrategy(RepositoryDetectionStrategies detectionStrategy) {
this.detectionStrategy = detectionStrategy;
}
public @Nullable MediaType getDefaultMediaType() {
return this.defaultMediaType;
}
public void setDefaultMediaType(@Nullable MediaType defaultMediaType) {
this.defaultMediaType = defaultMediaType;
}
public @Nullable Boolean getReturnBodyOnCreate() {
return this.returnBodyOnCreate;
}
public void setReturnBodyOnCreate(@Nullable Boolean returnBodyOnCreate) {
this.returnBodyOnCreate = returnBodyOnCreate;
}
public @Nullable Boolean getReturnBodyOnUpdate() {
return this.returnBodyOnUpdate;
}
public void setReturnBodyOnUpdate(@Nullable Boolean returnBodyOnUpdate) {
this.returnBodyOnUpdate = returnBodyOnUpdate;
}
public @Nullable Boolean getEnableEnumTranslation() {
return this.enableEnumTranslation;
}
public void setEnableEnumTranslation(@Nullable Boolean enableEnumTranslation) {
this.enableEnumTranslation = enableEnumTranslation;
}
public void applyTo(RepositoryRestConfiguration rest) {
PropertyMapper map = PropertyMapper.get();
map.from(this::getBasePath).to(rest::setBasePath);
map.from(this::getDefaultPageSize).to(rest::setDefaultPageSize);
map.from(this::getMaxPageSize).to(rest::setMaxPageSize);
map.from(this::getPageParamName).to(rest::setPageParamName);
map.from(this::getLimitParamName).to(rest::setLimitParamName);
map.from(this::getSortParamName).to(rest::setSortParamName);
map.from(this::getDetectionStrategy).to(rest::setRepositoryDetectionStrategy);
map.from(this::getDefaultMediaType).to(rest::setDefaultMediaType);
map.from(this::getReturnBodyOnCreate).to(rest::setReturnBodyOnCreate);
map.from(this::getReturnBodyOnUpdate).to(rest::setReturnBodyOnUpdate);
map.from(this::getEnableEnumTranslation).to(rest::setEnableEnumTranslation);
}
}
| value |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationRequestsInfo.java | {
"start": 1237,
"end": 1463
} | class ____ a list of ReservationRequest and the
* interpreter which capture the semantic of this list (all/any/order).
*/
@XmlRootElement(name = "reservation-definition")
@XmlAccessorType(XmlAccessType.FIELD)
public | representing |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/IfBuildPropertyRepeatableStereotypeTest.java | {
"start": 3478,
"end": 3708
} | interface ____ {
}
@NotMatchingProperty
@Stereotype
@Inherited
@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @ | TransitiveNotMatchingProperty |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java | {
"start": 1573,
"end": 10576
} | class ____ extends OptimizerRules.OptimizerExpressionRule<BinaryLogic> {
public CombineBinaryComparisons() {
super(OptimizerRules.TransformDirection.DOWN);
}
@Override
public Expression rule(BinaryLogic e, LogicalOptimizerContext ctx) {
if (e instanceof And and) {
return combine(ctx.foldCtx(), and);
} else if (e instanceof Or or) {
return combine(ctx.foldCtx(), or);
}
return e;
}
// combine conjunction
private static Expression combine(FoldContext ctx, And and) {
List<BinaryComparison> bcs = new ArrayList<>();
List<Expression> exps = new ArrayList<>();
boolean changed = false;
List<Expression> andExps = Predicates.splitAnd(and);
andExps.sort((o1, o2) -> {
if (o1 instanceof NotEquals && o2 instanceof NotEquals) {
return 0; // keep NotEquals' order
} else if (o1 instanceof NotEquals || o2 instanceof NotEquals) {
return o1 instanceof NotEquals ? 1 : -1; // push NotEquals up
} else {
return 0; // keep non-Ranges' and non-NotEquals' order
}
});
for (Expression ex : andExps) {
if (ex instanceof BinaryComparison bc && (ex instanceof Equals || ex instanceof NotEquals) == false) {
if (bc.right().foldable() && (findExistingComparison(ctx, bc, bcs, true))) {
changed = true;
} else {
bcs.add(bc);
}
} else if (ex instanceof NotEquals neq) {
if (neq.right().foldable() && notEqualsIsRemovableFromConjunction(ctx, neq, bcs)) {
// the non-equality can simply be dropped: either superfluous or has been merged with an updated range/inequality
changed = true;
} else { // not foldable OR not overlapping
exps.add(ex);
}
} else {
exps.add(ex);
}
}
return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, bcs)) : and;
}
// combine disjunction
private static Expression combine(FoldContext ctx, Or or) {
List<BinaryComparison> bcs = new ArrayList<>();
List<Expression> exps = new ArrayList<>();
boolean changed = false;
for (Expression ex : Predicates.splitOr(or)) {
if (ex instanceof BinaryComparison bc) {
if (bc.right().foldable() && findExistingComparison(ctx, bc, bcs, false)) {
changed = true;
} else {
bcs.add(bc);
}
} else {
exps.add(ex);
}
}
return changed ? Predicates.combineOr(CollectionUtils.combine(exps, bcs)) : or;
}
/**
* Find commonalities between the given comparison in the given list.
* The method can be applied both for conjunctive (AND) or disjunctive purposes (OR).
*/
private static boolean findExistingComparison(FoldContext ctx, BinaryComparison main, List<BinaryComparison> bcs, boolean conjunctive) {
Object value = main.right().fold(ctx);
// NB: the loop modifies the list (hence why the int is used)
for (int i = 0; i < bcs.size(); i++) {
BinaryComparison other = bcs.get(i);
// skip if cannot evaluate
if (other.right().foldable() == false) {
continue;
}
// if bc is a higher/lower value or gte vs gt, use it instead
if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual)
&& (main instanceof GreaterThan || main instanceof GreaterThanOrEqual)) {
if (main.left().semanticEquals(other.left())) {
Integer compare = BinaryComparison.compare(value, other.right().fold(ctx));
if (compare != null) {
// AND
if ((conjunctive &&
// a > 3 AND a > 2 -> a > 3
(compare > 0 ||
// a > 2 AND a >= 2 -> a > 2
(compare == 0 && main instanceof GreaterThan && other instanceof GreaterThanOrEqual))) ||
// OR
(conjunctive == false &&
// a > 2 OR a > 3 -> a > 2
(compare < 0 ||
// a >= 2 OR a > 2 -> a >= 2
(compare == 0 && main instanceof GreaterThanOrEqual && other instanceof GreaterThan)))) {
bcs.remove(i);
bcs.add(i, main);
}
// found a match
return true;
}
return false;
}
}
// if bc is a lower/higher value or lte vs lt, use it instead
else if ((other instanceof LessThan || other instanceof LessThanOrEqual)
&& (main instanceof LessThan || main instanceof LessThanOrEqual)) {
if (main.left().semanticEquals(other.left())) {
Integer compare = BinaryComparison.compare(value, other.right().fold(ctx));
if (compare != null) {
// AND
if ((conjunctive &&
// a < 2 AND a < 3 -> a < 2
(compare < 0 ||
// a < 2 AND a <= 2 -> a < 2
(compare == 0 && main instanceof LessThan && other instanceof LessThanOrEqual))) ||
// OR
(conjunctive == false &&
// a < 2 OR a < 3 -> a < 3
(compare > 0 ||
// a <= 2 OR a < 2 -> a <= 2
(compare == 0 && main instanceof LessThanOrEqual && other instanceof LessThan)))) {
bcs.remove(i);
bcs.add(i, main);
}
// found a match
return true;
}
return false;
}
}
}
return false;
}
private static boolean notEqualsIsRemovableFromConjunction(FoldContext ctx, NotEquals notEquals, List<BinaryComparison> bcs) {
Object neqVal = notEquals.right().fold(ctx);
Integer comp;
// check on "condition-overlapping" inequalities:
// a != 2 AND a > 3 -> a > 3 (discard NotEquals)
// a != 2 AND a >= 2 -> a > 2 (discard NotEquals plus update inequality)
// a != 2 AND a > 1 -> nop (do nothing)
//
// a != 2 AND a < 3 -> nop
// a != 2 AND a <= 2 -> a < 2
// a != 2 AND a < 1 -> a < 1
for (int i = 0; i < bcs.size(); i++) {
BinaryComparison bc = bcs.get(i);
if (notEquals.left().semanticEquals(bc.left())) {
if (bc instanceof LessThan || bc instanceof LessThanOrEqual) {
comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold(ctx)) : null;
if (comp != null) {
if (comp >= 0) {
if (comp == 0 && bc instanceof LessThanOrEqual) { // a != 2 AND a <= 2 -> a < 2
bcs.set(i, new LessThan(bc.source(), bc.left(), bc.right(), bc.zoneId()));
} // else : comp > 0 (a != 2 AND a </<= 1 -> a </<= 1), or == 0 && bc i.of "<" (a != 2 AND a < 2 -> a < 2)
return true;
} // else: comp < 0 : a != 2 AND a </<= 3 -> nop
} // else: non-comparable, nop
} else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) {
comp = bc.right().foldable() ? BinaryComparison.compare(neqVal, bc.right().fold(ctx)) : null;
if (comp != null) {
if (comp <= 0) {
if (comp == 0 && bc instanceof GreaterThanOrEqual) { // a != 2 AND a >= 2 -> a > 2
bcs.set(i, new GreaterThan(bc.source(), bc.left(), bc.right(), bc.zoneId()));
} // else: comp < 0 (a != 2 AND a >/>= 3 -> a >/>= 3), or == 0 && bc i.of ">" (a != 2 AND a > 2 -> a > 2)
return true;
} // else: comp > 0 : a != 2 AND a >/>= 1 -> nop
} // else: non-comparable, nop
} // else: other non-relevant type
}
}
return false;
}
}
| CombineBinaryComparisons |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/transformations/StreamExchangeMode.java | {
"start": 1067,
"end": 2053
} | enum ____ {
/**
* Producer and consumer are online at the same time. Produced data is received by consumer
* immediately.
*/
PIPELINED,
/**
* The producer first produces its entire result and finishes. After that, the consumer is
* started and may consume the data.
*/
BATCH,
/**
* The consumer can start consuming data anytime as long as the producer has started producing.
*
* <p>This exchange mode is re-consumable.
*/
HYBRID_FULL,
/**
* The consumer can start consuming data anytime as long as the producer has started producing.
*
* <p>This exchange mode is not re-consumable.
*/
HYBRID_SELECTIVE,
/**
* The exchange mode is undefined. It leaves it up to the framework to decide the exchange mode.
* The framework will pick one of {@link StreamExchangeMode#BATCH} or {@link
* StreamExchangeMode#PIPELINED} in the end.
*/
UNDEFINED
}
| StreamExchangeMode |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/component/log/SpringLogMarkerTest.java | {
"start": 1139,
"end": 1855
} | class ____ extends SpringTestSupport {
protected Object expectedBody = "Hello there!";
protected String uri = "direct:start";
@Test
public void testSendingCamelExchangeToEndpointResultsInValidApplicationEventAfterTheRefreshEvent() throws Exception {
MockEndpoint result = resolveMandatoryEndpoint("mock:result", MockEndpoint.class);
result.expectedMessageCount(1);
template.sendBody(uri, expectedBody);
result.assertIsSatisfied();
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/log/SpringLogMarkerTest-context.xml");
}
}
| SpringLogMarkerTest |
java | apache__camel | tooling/maven/camel-eip-documentation-enricher-maven-plugin/src/test/java/org/apache/camel/maven/CamelSpringNamespaceTest.java | {
"start": 1053,
"end": 2048
} | class ____ {
private CamelSpringNamespace camelSpringNamespace = new CamelSpringNamespace();
@Test
public void testSchemaNamespace() {
assertEquals(Constants.XML_SCHEMA_NAMESPACE_URI,
camelSpringNamespace.getNamespaceURI(Constants.XML_SCHEMA_NAMESPACE_PREFIX));
assertNull(camelSpringNamespace.getNamespaceURI("unregisterdPrefix"));
}
@Test
public void testGetPrefix() {
try {
camelSpringNamespace.getPrefix(Constants.XML_SCHEMA_NAMESPACE_URI);
fail("UnsupportedOperationException expected");
} catch (UnsupportedOperationException e) {
// Expected.
}
}
@Test
public void testGetPrefixes() {
try {
camelSpringNamespace.getPrefixes(Constants.XML_SCHEMA_NAMESPACE_URI);
fail("UnsupportedOperationException expected");
} catch (UnsupportedOperationException e) {
// Expected.
}
}
}
| CamelSpringNamespaceTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/schedulers/IoScheduler.java | {
"start": 7586,
"end": 9096
} | class ____ extends Scheduler.Worker implements Runnable {
private final CompositeDisposable tasks;
private final CachedWorkerPool pool;
private final ThreadWorker threadWorker;
final AtomicBoolean once = new AtomicBoolean();
EventLoopWorker(CachedWorkerPool pool) {
this.pool = pool;
this.tasks = new CompositeDisposable();
this.threadWorker = pool.get();
}
@Override
public void dispose() {
if (once.compareAndSet(false, true)) {
tasks.dispose();
if (USE_SCHEDULED_RELEASE) {
threadWorker.scheduleActual(this, 0, TimeUnit.NANOSECONDS, null);
} else {
// releasing the pool should be the last action
pool.release(threadWorker);
}
}
}
@Override
public void run() {
pool.release(threadWorker);
}
@Override
public boolean isDisposed() {
return once.get();
}
@NonNull
@Override
public Disposable schedule(@NonNull Runnable action, long delayTime, @NonNull TimeUnit unit) {
if (tasks.isDisposed()) {
// don't schedule, we are unsubscribed
return EmptyDisposable.INSTANCE;
}
return threadWorker.scheduleActual(action, delayTime, unit, tasks);
}
}
static final | EventLoopWorker |
java | apache__flink | flink-python/src/main/java/org/apache/flink/client/python/PythonFunctionFactory.java | {
"start": 2844,
"end": 4910
} | interface ____ {
ScheduledExecutorService CACHE_CLEANUP_EXECUTOR_SERVICE =
Executors.newSingleThreadScheduledExecutor(
new ExecutorThreadFactory("PythonFunctionFactory"));
AtomicReference<Boolean> CACHE_CLEANUP_EXECUTOR_SERVICE_STARTED = new AtomicReference<>(false);
LoadingCache<CacheKey, PythonFunctionFactory> PYTHON_FUNCTION_FACTORY_CACHE =
CacheBuilder.newBuilder()
.expireAfterAccess(1, TimeUnit.MINUTES)
.maximumSize(maxConcurrentPythonFunctionFactories)
.removalListener(
(RemovalListener<CacheKey, PythonFunctionFactory>)
removalNotification -> {
if (removalNotification.getValue() instanceof Closeable) {
try {
((Closeable) removalNotification.getValue())
.close();
} catch (IOException ignore) {
}
}
})
.build(
new CacheLoader<CacheKey, PythonFunctionFactory>() {
@Override
public PythonFunctionFactory load(CacheKey cacheKey) {
try {
return createPythonFunctionFactory(cacheKey.config);
} catch (Throwable t) {
throw new RuntimeException(t);
}
}
});
/**
* Returns PythonFunction according to moduleName and objectName.
*
* @param moduleName The module name of the Python UDF.
* @param objectName The function name / | PythonFunctionFactory |
java | google__gson | gson/src/test/java/com/google/gson/functional/MoreSpecificTypeSerializationTest.java | {
"start": 4983,
"end": 5097
} | class ____<T> {
T t;
ParameterizedBase(T t) {
this.t = t;
}
}
private static | ParameterizedBase |
java | playframework__playframework | documentation/manual/working/javaGuide/main/http/code/javaguide/ActionCreator.java | {
"start": 324,
"end": 646
} | class ____ implements play.http.ActionCreator {
@Override
public Action createAction(Http.Request request, Method actionMethod) {
return new Action.Simple() {
@Override
public CompletionStage<Result> call(Http.Request req) {
return delegate.call(req);
}
};
}
}
// #default
| ActionCreator |
java | apache__camel | components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/ringbuffer/HazelcastRingbufferComponent.java | {
"start": 1202,
"end": 1723
} | class ____ extends HazelcastDefaultComponent {
public HazelcastRingbufferComponent() {
}
public HazelcastRingbufferComponent(final CamelContext context) {
super(context);
}
@Override
protected HazelcastDefaultEndpoint doCreateEndpoint(
String uri, String remaining, Map<String, Object> parameters, HazelcastInstance hzInstance)
throws Exception {
return new HazelcastRingbufferEndpoint(hzInstance, uri, this, remaining);
}
}
| HazelcastRingbufferComponent |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParserTests.java | {
"start": 1468,
"end": 8204
} | class ____ extends ESTestCase {
public void testParseSimpleFieldExpression() throws Exception {
String json = "{ \"field\": { \"username\" : \"*@shield.gov\" } }";
FieldExpression field = checkExpressionType(parse(json), FieldExpression.class);
assertThat(field.getField(), equalTo("username"));
assertThat(field.getValues(), iterableWithSize(1));
final FieldValue value = field.getValues().get(0);
assertThat(value.getValue(), equalTo("*@shield.gov"));
assertThat(value.getAutomaton(), notNullValue());
assertThat(value.getAutomaton().run("bob@shield.gov"), equalTo(true));
assertThat(value.getAutomaton().run("bob@example.net"), equalTo(false));
assertThat(json(field), equalTo(json.replaceAll("\\s", "")));
}
public void testParseComplexExpression() throws Exception {
String json = """
{ "any": [
{ "field": { "username" : "*@shield.gov" } },
{ "all": [
{ "field": { "username" : "/.*\\\\@avengers\\\\.(net|org)/" } },
{ "field": { "groups" : [ "admin", "operators" ] } },
{ "except":
{ "field": { "groups" : "disavowed" } }
}
] }
] }""";
final RoleMapperExpression expr = parse(json);
assertThat(expr, instanceOf(AnyExpression.class));
AnyExpression any = (AnyExpression) expr;
assertThat(any.getElements(), iterableWithSize(2));
final FieldExpression fieldShield = checkExpressionType(any.getElements().get(0), FieldExpression.class);
assertThat(fieldShield.getField(), equalTo("username"));
assertThat(fieldShield.getValues(), iterableWithSize(1));
final FieldValue valueShield = fieldShield.getValues().get(0);
assertThat(valueShield.getValue(), equalTo("*@shield.gov"));
assertThat(valueShield.getAutomaton(), notNullValue());
assertThat(valueShield.getAutomaton().run("fury@shield.gov"), equalTo(true));
assertThat(valueShield.getAutomaton().run("fury@shield.net"), equalTo(false));
final AllExpression all = checkExpressionType(any.getElements().get(1), AllExpression.class);
assertThat(all.getElements(), iterableWithSize(3));
final FieldExpression fieldAvengers = checkExpressionType(all.getElements().get(0), FieldExpression.class);
assertThat(fieldAvengers.getField(), equalTo("username"));
assertThat(fieldAvengers.getValues(), iterableWithSize(1));
final FieldValue valueAvengers = fieldAvengers.getValues().get(0);
assertThat(valueAvengers.getAutomaton().run("stark@avengers.net"), equalTo(true));
assertThat(valueAvengers.getAutomaton().run("romanov@avengers.org"), equalTo(true));
assertThat(valueAvengers.getAutomaton().run("fury@shield.gov"), equalTo(false));
final FieldExpression fieldGroupsAdmin = checkExpressionType(all.getElements().get(1), FieldExpression.class);
assertThat(fieldGroupsAdmin.getField(), equalTo("groups"));
assertThat(fieldGroupsAdmin.getValues(), iterableWithSize(2));
assertThat(fieldGroupsAdmin.getValues().get(0).getValue(), equalTo("admin"));
assertThat(fieldGroupsAdmin.getValues().get(1).getValue(), equalTo("operators"));
final ExceptExpression except = checkExpressionType(all.getElements().get(2), ExceptExpression.class);
final FieldExpression fieldDisavowed = checkExpressionType(except.getInnerExpression(), FieldExpression.class);
assertThat(fieldDisavowed.getField(), equalTo("groups"));
assertThat(fieldDisavowed.getValues(), iterableWithSize(1));
assertThat(fieldDisavowed.getValues().get(0).getValue(), equalTo("disavowed"));
ExpressionModel hawkeye = new ExpressionModel();
hawkeye.defineField("username", "hawkeye@avengers.org");
hawkeye.defineField("groups", Arrays.asList("operators"));
assertThat(expr.match(hawkeye), equalTo(true));
ExpressionModel captain = new ExpressionModel();
captain.defineField("username", "america@avengers.net");
assertThat(expr.match(captain), equalTo(false));
ExpressionModel warmachine = new ExpressionModel();
warmachine.defineField("username", "warmachine@avengers.net");
warmachine.defineField("groups", Arrays.asList("admin", "disavowed"));
assertThat(expr.match(warmachine), equalTo(false));
ExpressionModel fury = new ExpressionModel();
fury.defineField("username", "fury@shield.gov");
fury.defineField("groups", Arrays.asList("classified", "directors"));
assertThat(expr.asPredicate().test(fury), equalTo(true));
assertThat(json(expr), equalTo(json.replaceAll("\\s", "")));
}
public void testWriteAndReadFromStream() throws Exception {
String json = """
{ "any": [
{ "field": { "username" : "*@shield.gov" } },
{ "all": [
{ "field": { "username" : "/.*\\\\@avengers\\\\.(net|org)/" } },
{ "field": { "groups" : [ "admin", "operators" ] } },
{ "except":
{ "field": { "groups" : "disavowed" } }
}
] }
] }""";
final RoleMapperExpression exprSource = parse(json);
final BytesStreamOutput out = new BytesStreamOutput();
ExpressionParser.writeExpression(exprSource, out);
final Settings settings = Settings.builder().put("path.home", createTempDir()).build();
final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables());
final NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
final RoleMapperExpression exprResult = ExpressionParser.readExpression(input);
assertThat(json(exprResult), equalTo(json.replaceAll("\\s", "")));
}
private <T> T checkExpressionType(RoleMapperExpression expr, Class<T> type) {
assertThat(expr, instanceOf(type));
return type.cast(expr);
}
private RoleMapperExpression parse(String json) throws IOException {
return new ExpressionParser().parse("rules", new XContentSource(new BytesArray(json), XContentType.JSON));
}
private String json(RoleMapperExpression node) throws IOException {
final StringWriter writer = new StringWriter();
try (XContentBuilder builder = XContentFactory.jsonBuilder(new WriterOutputStream(writer))) {
node.toXContent(builder, ToXContent.EMPTY_PARAMS);
}
return writer.toString();
}
}
| ExpressionParserTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeUtils.java | {
"start": 1548,
"end": 4237
} | class ____ {
public static final Logger LOG = LoggerFactory.getLogger(NameNodeUtils.class);
/**
* Return the namenode address that will be used by clients to access this
* namenode or name service. This needs to be called before the config
* is overriden.
*
* This method behaves as follows:
*
* 1. fs.defaultFS is undefined:
* - return null.
* 2. fs.defaultFS is defined but has no hostname (logical or physical):
* - return null.
* 3. Single NN (no HA, no federation):
* - return URI authority from fs.defaultFS
* 4. Current NN is in an HA nameservice (with or without federation):
* - return nameservice for current NN.
* 5. Current NN is in non-HA namespace, federated cluster:
* - return value of dfs.namenode.rpc-address.[nsId].[nnId]
* - If the above key is not defined, then return authority from
* fs.defaultFS if the port number is > 0.
* 6. If port number in the authority is missing or zero in step 6:
* - return null
*/
@VisibleForTesting
@Nullable
static String getClientNamenodeAddress(
Configuration conf, @Nullable String nsId) {
final Collection<String> nameservices =
DFSUtilClient.getNameServiceIds(conf);
final String nnAddr = conf.getTrimmed(FS_DEFAULT_NAME_KEY);
if (nnAddr == null) {
// default fs is not set.
return null;
}
LOG.info("{} is {}", FS_DEFAULT_NAME_KEY, nnAddr);
final URI nnUri = URI.create(nnAddr);
String defaultNnHost = nnUri.getHost();
if (defaultNnHost == null) {
return null;
}
// Current Nameservice is HA.
if (nsId != null && nameservices.contains(nsId)) {
final Collection<String> namenodes = conf.getTrimmedStringCollection(
DFS_HA_NAMENODES_KEY_PREFIX + "." + nsId);
if (namenodes.size() > 1) {
return nsId;
}
}
// Federation without HA. We must handle the case when the current NN
// is not in the default nameservice.
String currentNnAddress = null;
if (nsId != null) {
String hostNameKey = DFS_NAMENODE_RPC_ADDRESS_KEY + "." + nsId;
currentNnAddress = conf.get(hostNameKey);
}
// Fallback to the address in fs.defaultFS.
if (currentNnAddress == null) {
currentNnAddress = nnUri.getAuthority();
}
int port = 0;
if (currentNnAddress.contains(":")) {
port = Integer.parseInt(currentNnAddress.split(":")[1]);
}
if (port > 0) {
return currentNnAddress;
} else {
// the port is missing or 0. Figure out real bind address later.
return null;
}
}
private NameNodeUtils() {
// Disallow construction
}
}
| NameNodeUtils |
java | apache__rocketmq | common/src/main/java/org/apache/rocketmq/common/logging/JoranConfiguratorExt.java | {
"start": 1282,
"end": 2929
} | class ____ extends JoranConfigurator {
private InputStream transformXml(InputStream in) throws IOException {
try {
String str = CharStreams.toString(new InputStreamReader(in, StandardCharsets.UTF_8));
str = str.replace("\"ch.qos.logback", "\"org.apache.rocketmq.logging.ch.qos.logback");
return new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));
} finally {
if (null != in) {
in.close();
}
}
}
public final void doConfigure0(URL url) throws JoranException {
InputStream in = null;
try {
informContextOfURLUsedForConfiguration(getContext(), url);
URLConnection urlConnection = url.openConnection();
// per http://jira.qos.ch/browse/LBCORE-105
// per http://jira.qos.ch/browse/LBCORE-127
urlConnection.setUseCaches(false);
InputStream temp = urlConnection.getInputStream();
in = transformXml(temp);
doConfigure(in, url.toExternalForm());
} catch (IOException ioe) {
String errMsg = "Could not open URL [" + url + "].";
addError(errMsg, ioe);
throw new JoranException(errMsg, ioe);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException ioe) {
String errMsg = "Could not close input stream";
addError(errMsg, ioe);
throw new JoranException(errMsg, ioe);
}
}
}
}
}
| JoranConfiguratorExt |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/test-fixtures-multi-module/library-1/src/testFixtures/java/org/example/TestHelper.java | {
"start": 30,
"end": 135
} | class ____ {
public void setUp() {
StaticInitLibrary.init(new OfyFactory());
}
}
| TestHelper |
java | apache__logging-log4j2 | log4j-1.2-api/src/test/java/org/apache/log4j/config/SyslogAppenderTest.java | {
"start": 1397,
"end": 2681
} | class ____ {
private static MockSyslogServer syslogServer;
@BeforeAll
static void beforeAll() throws IOException {
initTCPTestEnvironment(null);
System.setProperty("syslog.port", Integer.toString(syslogServer.getLocalPort()));
System.setProperty(
ConfigurationFactory.LOG4J1_CONFIGURATION_FILE_PROPERTY, "target/test-classes/log4j1-syslog.xml");
}
@AfterAll
static void afterAll() {
System.clearProperty(ConfigurationFactory.LOG4J1_CONFIGURATION_FILE_PROPERTY);
syslogServer.shutdown();
}
@Test
void sendMessage() throws Exception {
final Logger logger = LogManager.getLogger(SyslogAppenderTest.class);
logger.info("This is a test");
List<String> messages = null;
for (int i = 0; i < 5; ++i) {
Thread.sleep(250);
messages = syslogServer.getMessageList();
if (messages != null && !messages.isEmpty()) {
break;
}
}
assertThat(messages, hasSize(1));
}
protected static void initTCPTestEnvironment(final String messageFormat) throws IOException {
syslogServer = MockSyslogServerFactory.createTCPSyslogServer();
syslogServer.start();
}
}
| SyslogAppenderTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiStreamingProcessorTests.java | {
"start": 1330,
"end": 6980
} | class ____ extends ESTestCase {
public void testParseOpenAiResponse() throws IOException {
var item = new ArrayDeque<ServerSentEvent>();
item.offer(new ServerSentEvent("""
{
"id":"12345",
"object":"chat.completion.chunk",
"created":123456789,
"model":"gpt-4o-mini",
"system_fingerprint": "123456789",
"choices":[
{
"index":0,
"delta":{
"content":"test"
},
"logprobs":null,
"finish_reason":null
}
]
}
"""));
var response = onNext(new OpenAiStreamingProcessor(), item);
var json = toJsonString(response);
assertThat(json, equalTo("""
{"completion":[{"delta":"test"}]}"""));
}
public void testParseWithFinish() throws IOException {
var item = new ArrayDeque<ServerSentEvent>();
item.offer(new ServerSentEvent("""
{
"id":"12345",
"object":"chat.completion.chunk",
"created":123456789,
"model":"gpt-4o-mini",
"system_fingerprint": "123456789",
"choices":[
{
"index":0,
"delta":{
"content":"hello, world"
},
"logprobs":null,
"finish_reason":null
}
]
}
"""));
item.offer(new ServerSentEvent("""
{
"id":"12345",
"object":"chat.completion.chunk",
"created":123456789,
"model":"gpt-4o-mini",
"system_fingerprint": "123456789",
"choices":[
{
"index":1,
"delta":{},
"logprobs":null,
"finish_reason":"stop"
}
]
}
"""));
var response = onNext(new OpenAiStreamingProcessor(), item);
var json = toJsonString(response);
assertThat(json, equalTo("""
{"completion":[{"delta":"hello, world"}]}"""));
}
public void testParseErrorCallsOnError() {
var item = new ArrayDeque<ServerSentEvent>();
item.offer(new ServerSentEvent("this isn't json"));
var exception = onError(new OpenAiStreamingProcessor(), item);
assertThat(exception, instanceOf(XContentParseException.class));
}
public void testEmptyResultsRequestsMoreData() throws Exception {
var emptyDeque = new ArrayDeque<ServerSentEvent>();
var processor = new OpenAiStreamingProcessor();
Flow.Subscriber<ChunkedToXContent> downstream = mock();
processor.subscribe(downstream);
Flow.Subscription upstream = mock();
processor.onSubscribe(upstream);
processor.next(emptyDeque);
verify(upstream, times(1)).request(1);
verify(downstream, times(0)).onNext(any());
}
public void testDoneMessageIsIgnored() throws Exception {
var item = new ArrayDeque<ServerSentEvent>();
item.offer(new ServerSentEvent("[DONE]"));
var processor = new OpenAiStreamingProcessor();
Flow.Subscriber<ChunkedToXContent> downstream = mock();
processor.subscribe(downstream);
Flow.Subscription upstream = mock();
processor.onSubscribe(upstream);
processor.next(item);
verify(upstream, times(1)).request(1);
verify(downstream, times(0)).onNext(any());
}
public void testInitialLlamaResponseIsIgnored() throws Exception {
var item = new ArrayDeque<ServerSentEvent>();
item.offer(new ServerSentEvent("""
{
"id":"12345",
"object":"chat.completion.chunk",
"created":123456789,
"model":"Llama-2-7b-chat",
"system_fingerprint": "123456789",
"choices":[
{
"index":0,
"delta":{
"role":"assistant"
},
"logprobs":null,
"finish_reason":null
}
]
}
"""));
var processor = new OpenAiStreamingProcessor();
Flow.Subscriber<ChunkedToXContent> downstream = mock();
processor.subscribe(downstream);
Flow.Subscription upstream = mock();
processor.onSubscribe(upstream);
processor.next(item);
verify(upstream, times(1)).request(1);
verify(downstream, times(0)).onNext(any());
}
private String toJsonString(ChunkedToXContent chunkedToXContent) throws IOException {
try (var builder = XContentFactory.jsonBuilder()) {
chunkedToXContent.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> {
try {
xContent.toXContent(builder, EMPTY_PARAMS);
} catch (IOException e) {
logger.error(e.getMessage(), e);
fail(e.getMessage());
}
});
return XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType());
}
}
}
| OpenAiStreamingProcessorTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/textstructure/structurefinder/TextStructureTests.java | {
"start": 766,
"end": 4691
} | class ____ extends AbstractXContentSerializingTestCase<TextStructure> {
@Override
protected TextStructure createTestInstance() {
return createTestFileStructure();
}
@Override
protected TextStructure mutateInstance(TextStructure instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
public static TextStructure createTestFileStructure() {
TextStructure.Format format = randomFrom(EnumSet.allOf(TextStructure.Format.class));
TextStructure.Builder builder = new TextStructure.Builder(format);
int numLinesAnalyzed = randomIntBetween(2, 10000);
builder.setNumLinesAnalyzed(numLinesAnalyzed);
int numMessagesAnalyzed = randomIntBetween(1, numLinesAnalyzed);
builder.setNumMessagesAnalyzed(numMessagesAnalyzed);
builder.setSampleStart(randomAlphaOfLength(1000));
String charset = randomFrom(Charset.availableCharsets().keySet());
builder.setCharset(charset);
if (charset.toUpperCase(Locale.ROOT).startsWith("UTF")) {
builder.setHasByteOrderMarker(randomBoolean());
}
if (numMessagesAnalyzed < numLinesAnalyzed) {
builder.setMultilineStartPattern(randomAlphaOfLength(100));
}
if (randomBoolean()) {
builder.setExcludeLinesPattern(randomAlphaOfLength(100));
}
if (format == TextStructure.Format.DELIMITED) {
builder.setColumnNames(Arrays.asList(generateRandomStringArray(10, 10, false, false)));
builder.setHasHeaderRow(randomBoolean());
builder.setDelimiter(randomFrom(',', '\t', ';', '|'));
builder.setQuote(randomFrom('"', '\''));
}
if (format == TextStructure.Format.SEMI_STRUCTURED_TEXT) {
builder.setGrokPattern(randomAlphaOfLength(100));
}
if (format == TextStructure.Format.SEMI_STRUCTURED_TEXT || randomBoolean()) {
builder.setTimestampField(randomAlphaOfLength(10));
builder.setJodaTimestampFormats(Arrays.asList(generateRandomStringArray(3, 20, false, false)));
builder.setJavaTimestampFormats(Arrays.asList(generateRandomStringArray(3, 20, false, false)));
builder.setNeedClientTimezone(randomBoolean());
}
Map<String, Object> mappings = new TreeMap<>();
for (String field : generateRandomStringArray(5, 20, false, false)) {
mappings.put(field, Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(10)));
}
builder.setMappings(mappings);
if (randomBoolean()) {
Map<String, Object> ingestPipeline = new LinkedHashMap<>();
for (String field : generateRandomStringArray(5, 20, false, false)) {
ingestPipeline.put(field, Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(10)));
}
builder.setMappings(ingestPipeline);
}
if (randomBoolean()) {
Map<String, FieldStats> fieldStats = new TreeMap<>();
for (String field : generateRandomStringArray(5, 20, false, false)) {
fieldStats.put(field, FieldStatsTests.createTestFieldStats());
}
builder.setFieldStats(fieldStats);
}
builder.setExplanation(Arrays.asList(generateRandomStringArray(10, 150, false, false)));
return builder.build();
}
@Override
protected Writeable.Reader<TextStructure> instanceReader() {
return TextStructure::new;
}
@Override
protected TextStructure doParseInstance(XContentParser parser) {
return TextStructure.PARSER.apply(parser, null).build();
}
@Override
protected ToXContent.Params getToXContentParams() {
return new ToXContent.MapParams(Collections.singletonMap(TextStructure.EXPLAIN, "true"));
}
}
| TextStructureTests |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/CharRangeTest.java | {
"start": 1468,
"end": 1560
} | class ____ extends AbstractLangTest {
@Test
void testClass() {
// | CharRangeTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java | {
"start": 2485,
"end": 3127
} | class ____ extends MutableMetric implements Closeable {
private MutableRatesWithAggregation innerMetrics =
new MutableRatesWithAggregation();
@VisibleForTesting
static final ScheduledExecutorService SCHEDULER = Executors
.newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true)
.setNameFormat("MutableRollingAverages-%d").build());
private ScheduledFuture<?> scheduledTask = null;
@Nullable
private Map<String, MutableRate> currentSnapshot;
private final String avgInfoNameTemplate;
private final String avgInfoDescTemplate;
private int numWindows;
/**
* This | MutableRollingAverages |
java | elastic__elasticsearch | client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java | {
"start": 1516,
"end": 1845
} | class ____ exposes static methods to unify the way requests are logged.
* Includes trace logging to log complete requests and responses in curl format.
* Useful for debugging, manually sending logged requests via curl and checking their responses.
* Trace logging is a feature that all the language clients provide.
*/
final | that |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootTestDefaultConfigurationTests.java | {
"start": 1247,
"end": 1463
} | class ____ {
@Autowired
private Config config;
@Test
void nestedConfigClasses() {
assertThat(this.config).isNotNull();
}
@Configuration(proxyBeanMethods = false)
static | SpringBootTestDefaultConfigurationTests |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/single/SingleSubscribeTest.java | {
"start": 1147,
"end": 8096
} | class ____ extends RxJavaTest {
@Test
public void consumer() {
final Integer[] value = { null };
Single.just(1).subscribe(new Consumer<Integer>() {
@Override
public void accept(Integer v) throws Exception {
value[0] = v;
}
});
assertEquals((Integer)1, value[0]);
}
@Test
public void biconsumer() {
final Object[] value = { null, null };
Single.just(1).subscribe(new BiConsumer<Integer, Throwable>() {
@Override
public void accept(Integer v, Throwable e) throws Exception {
value[0] = v;
value[1] = e;
}
});
assertEquals(1, value[0]);
assertNull(value[1]);
}
@Test
public void biconsumerError() {
final Object[] value = { null, null };
TestException ex = new TestException();
Single.error(ex).subscribe(new BiConsumer<Object, Throwable>() {
@Override
public void accept(Object v, Throwable e) throws Exception {
value[0] = v;
value[1] = e;
}
});
assertNull(value[0]);
assertEquals(ex, value[1]);
}
@Test
public void subscribeThrows() {
try {
new Single<Integer>() {
@Override
protected void subscribeActual(SingleObserver<? super Integer> observer) {
throw new IllegalArgumentException();
}
}.test();
} catch (NullPointerException ex) {
if (!(ex.getCause() instanceof IllegalArgumentException)) {
fail(ex.toString() + ": should have thrown NPE(IAE)");
}
}
}
@Test
public void biConsumerDispose() {
PublishSubject<Integer> ps = PublishSubject.create();
Disposable d = ps.single(-99).subscribe(new BiConsumer<Object, Object>() {
@Override
public void accept(Object t1, Object t2) throws Exception {
}
});
assertFalse(d.isDisposed());
d.dispose();
assertTrue(d.isDisposed());
assertFalse(ps.hasObservers());
}
@Test
public void consumerDispose() {
PublishSubject<Integer> ps = PublishSubject.create();
Disposable d = ps.single(-99).subscribe(Functions.<Integer>emptyConsumer());
assertFalse(d.isDisposed());
d.dispose();
assertTrue(d.isDisposed());
assertFalse(ps.hasObservers());
}
@Test
public void consumerSuccessThrows() {
List<Throwable> list = TestHelper.trackPluginErrors();
try {
Single.just(1).subscribe(new Consumer<Integer>() {
@Override
public void accept(Integer t) throws Exception {
throw new TestException();
}
});
TestHelper.assertUndeliverable(list, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void consumerErrorThrows() {
List<Throwable> list = TestHelper.trackPluginErrors();
try {
Single.<Integer>error(new TestException("Outer failure")).subscribe(
Functions.<Integer>emptyConsumer(),
new Consumer<Throwable>() {
@Override
public void accept(Throwable t) throws Exception {
throw new TestException("Inner failure");
}
});
TestHelper.assertError(list, 0, CompositeException.class);
List<Throwable> cel = TestHelper.compositeList(list.get(0));
TestHelper.assertError(cel, 0, TestException.class, "Outer failure");
TestHelper.assertError(cel, 1, TestException.class, "Inner failure");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void biConsumerThrows() {
List<Throwable> list = TestHelper.trackPluginErrors();
try {
Single.just(1).subscribe(new BiConsumer<Integer, Throwable>() {
@Override
public void accept(Integer t, Throwable e) throws Exception {
throw new TestException();
}
});
TestHelper.assertUndeliverable(list, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void biConsumerErrorThrows() {
List<Throwable> list = TestHelper.trackPluginErrors();
try {
Single.<Integer>error(new TestException("Outer failure")).subscribe(
new BiConsumer<Integer, Throwable>() {
@Override
public void accept(Integer a, Throwable t) throws Exception {
throw new TestException("Inner failure");
}
});
TestHelper.assertError(list, 0, CompositeException.class);
List<Throwable> cel = TestHelper.compositeList(list.get(0));
TestHelper.assertError(cel, 0, TestException.class, "Outer failure");
TestHelper.assertError(cel, 1, TestException.class, "Inner failure");
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void methodTestNoCancel() {
PublishSubject<Integer> ps = PublishSubject.create();
ps.single(-99).test(false);
assertTrue(ps.hasObservers());
}
@Test
public void successIsDisposed() {
assertTrue(Single.just(1).subscribe().isDisposed());
}
@Test
public void errorIsDisposed() {
assertTrue(Single.error(new TestException()).subscribe(Functions.emptyConsumer(), Functions.emptyConsumer()).isDisposed());
}
@Test
public void biConsumerIsDisposedOnSuccess() {
final Object[] result = { null, null };
Disposable d = Single.just(1)
.subscribe(new BiConsumer<Integer, Throwable>() {
@Override
public void accept(Integer t1, Throwable t2) throws Exception {
result[0] = t1;
result[1] = t2;
}
});
assertTrue("Not disposed?!", d.isDisposed());
assertEquals(1, result[0]);
assertNull(result[1]);
}
@Test
public void biConsumerIsDisposedOnError() {
final Object[] result = { null, null };
Disposable d = Single.<Integer>error(new IOException())
.subscribe(new BiConsumer<Integer, Throwable>() {
@Override
public void accept(Integer t1, Throwable t2) throws Exception {
result[0] = t1;
result[1] = t2;
}
});
assertTrue("Not disposed?!", d.isDisposed());
assertNull(result[0]);
assertTrue("" + result[1], result[1] instanceof IOException);
}
}
| SingleSubscribeTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestProtosLegacy.java | {
"start": 240908,
"end": 241436
} | interface ____
extends com.google.protobuf.MessageOrBuilder {
// repeated int32 values = 1;
/**
* <code>repeated int32 values = 1;</code>
*/
java.util.List<java.lang.Integer> getValuesList();
/**
* <code>repeated int32 values = 1;</code>
*/
int getValuesCount();
/**
* <code>repeated int32 values = 1;</code>
*/
int getValues(int index);
}
/**
* Protobuf type {@code hadoop.common.ExchangeRequestProto}
*/
public static final | ExchangeRequestProtoOrBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java | {
"start": 1161,
"end": 3749
} | enum ____ {
STANDARD(CachingStrategy.INDEX) {
@Override
protected Analyzer create(IndexVersion version) {
return new StandardAnalyzer(CharArraySet.EMPTY_SET);
}
},
DEFAULT(CachingStrategy.INDEX) {
@Override
protected Analyzer create(IndexVersion version) {
// by calling get analyzer we are ensuring reuse of the same STANDARD analyzer for DEFAULT!
// this call does not create a new instance
return STANDARD.getAnalyzer(version);
}
},
KEYWORD(CachingStrategy.ONE) {
@Override
protected Analyzer create(IndexVersion version) {
return new KeywordAnalyzer();
}
},
STOP {
@Override
protected Analyzer create(IndexVersion version) {
return new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET);
}
},
WHITESPACE {
@Override
protected Analyzer create(IndexVersion version) {
return new WhitespaceAnalyzer();
}
},
SIMPLE {
@Override
protected Analyzer create(IndexVersion version) {
return new SimpleAnalyzer();
}
},
CLASSIC {
@Override
protected Analyzer create(IndexVersion version) {
return new ClassicAnalyzer();
}
};
protected abstract Analyzer create(IndexVersion version);
protected final PreBuiltCacheFactory.PreBuiltCache<Analyzer> cache;
PreBuiltAnalyzers() {
this(PreBuiltCacheFactory.CachingStrategy.LUCENE);
}
PreBuiltAnalyzers(PreBuiltCacheFactory.CachingStrategy cachingStrategy) {
cache = PreBuiltCacheFactory.getCache(cachingStrategy);
}
public PreBuiltCacheFactory.PreBuiltCache<Analyzer> getCache() {
return cache;
}
public synchronized Analyzer getAnalyzer(IndexVersion version) {
Analyzer analyzer = cache.get(version);
if (analyzer == null) {
analyzer = this.create(version);
cache.put(version, analyzer);
}
return analyzer;
}
/**
* Get a pre built Analyzer by its name or fallback to the default one
* @param name Analyzer name
* @param defaultAnalyzer default Analyzer if name not found
*/
public static PreBuiltAnalyzers getOrDefault(String name, PreBuiltAnalyzers defaultAnalyzer) {
try {
return valueOf(name.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
return defaultAnalyzer;
}
}
}
| PreBuiltAnalyzers |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/RestartStrategyDescriptionUtils.java | {
"start": 1073,
"end": 1134
} | class ____ describing restart strategies. */
@Internal
public | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytomany/ManyToManyHqlMemberOfQueryTest.java | {
"start": 5973,
"end": 6814
} | class ____ {
@Id
private Long id;
@Column(name = "phone_number")
private String number;
@OneToMany(mappedBy = "phone", cascade = CascadeType.ALL, orphanRemoval = true)
private List<Call> calls = new ArrayList<>();
@ElementCollection
private List<Date> repairTimestamps = new ArrayList<>();
public Phone() {
}
public Phone(String number) {
this.number = number;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getNumber() {
return number;
}
public List<Call> getCalls() {
return calls;
}
public List<Date> getRepairTimestamps() {
return repairTimestamps;
}
public void addCall(Call call) {
calls.add( call );
call.setPhone( this );
}
}
@Entity(name = "Call")
@Table(name = "phone_call")
public static | Phone |
java | apache__camel | components/camel-activemq6/src/test/java/org/apache/camel/component/activemq6/ActiveMQToDIT.java | {
"start": 1247,
"end": 2664
} | class ____ extends ActiveMQITSupport {
private ProducerTemplate template;
@Test
public void testToD() throws Exception {
contextExtension.getMockEndpoint("mock:bar").expectedBodiesReceived("Hello bar");
contextExtension.getMockEndpoint("mock:beer").expectedBodiesReceived("Hello beer");
template.sendBodyAndHeader("direct:start", "Hello bar", "where", "bar");
template.sendBodyAndHeader("direct:start", "Hello beer", "where", "beer");
MockEndpoint.assertIsSatisfied(contextExtension.getContext());
}
@BeforeEach
void setupTemplate() {
template = contextExtension.getProducerTemplate();
}
@ContextFixture
public void configureContext(CamelContext camelContext) {
camelContext.addComponent("activemq6", activeMQComponent(service.defaultEndpoint()));
}
@RouteFixture
public void createRouteBuilder(CamelContext context) throws Exception {
context.addRoutes(createRouteBuilder());
}
private RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// route message dynamic using toD
from("direct:start").toD("activemq6:queue:${header.where}");
from("activemq6:queue:bar").to("mock:bar");
from("activemq6:queue:beer").to("mock:beer");
}
};
}
}
| ActiveMQToDIT |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java | {
"start": 18122,
"end": 36676
} | class ____ for" +
" " + sName);
}
// TODO better use s.getName()?
if (!sName.equals(s.getName())) {
LOG.warn("The Auxiliary Service named '" + sName + "' in the "
+ "configuration is for " + s.getClass() + " which has "
+ "a name of '" + s.getName() + "'. Because these are "
+ "not the same tools trying to send ServiceData and read "
+ "Service Meta Data may have issues unless the refer to "
+ "the name in the config.");
}
s.setAuxiliaryLocalPathHandler(auxiliaryLocalPathHandler);
setStateStoreDir(sName, s);
Configuration customConf = new Configuration(conf);
if (service.getConfiguration() != null) {
for (Entry<String, String> entry : service.getConfiguration()
.getProperties().entrySet()) {
customConf.set(entry.getKey(), entry.getValue());
}
}
s.init(customConf);
LOG.info("Initialized auxiliary service " + sName);
} catch (RuntimeException e) {
LOG.error("Failed to initialize " + sName, e);
throw e;
} catch (ClassNotFoundException e) {
throw new YarnRuntimeException(e);
}
return s;
}
/**
* Reloads auxiliary services manifest. Must be called after service init.
*
* @throws IOException if manifest can't be loaded
*/
@VisibleForTesting
protected void reloadManifest() throws IOException {
loadManifest(getConfig(), true);
}
/**
* Reloads auxiliary services. Must be called after service init.
*
* @param services a list of auxiliary services
* @throws IOException if aux services have not been started yet or dynamic
* reloading is not enabled
*/
public synchronized void reload(AuxServiceRecords services) throws
IOException {
if (!manifestEnabled) {
throw new IOException("Dynamic reloading is not enabled via " +
YarnConfiguration.NM_AUX_SERVICES_MANIFEST_ENABLED);
}
if (getServiceState() != Service.STATE.STARTED) {
throw new IOException("Auxiliary services have not been started yet, " +
"please retry later");
}
LOG.info("Received list of auxiliary services: " + mapper
.writeValueAsString(services));
loadServices(services, getConfig(), true);
}
@VisibleForTesting
boolean checkManifestPermissions(FileStatus status) throws
IOException {
if ((status.getPermission().toShort() & 0022) != 0) {
LOG.error("Manifest file and parents must not be writable by group or " +
"others. The current Permission of " + status.getPath() + " is " +
status.getPermission());
return false;
}
Path parent = status.getPath().getParent();
if (parent == null) {
return true;
}
return checkManifestPermissions(getManifestFS().getFileStatus(parent));
}
private boolean checkManifestOwnerAndPermissions(FileStatus status) throws
IOException {
AccessControlList yarnAdminAcl = new AccessControlList(getConfig().get(
YarnConfiguration.YARN_ADMIN_ACL,
YarnConfiguration.DEFAULT_YARN_ADMIN_ACL));
if (!yarnAdminAcl.isUserAllowed(
UserGroupInformation.createRemoteUser(status.getOwner()))) {
LOG.error("Manifest must be owned by YARN admin: " + manifest);
return false;
}
return checkManifestPermissions(status);
}
/**
* Reads the manifest file if it is configured, exists, and has not been
* modified since the last read.
*
* @return aux service records
* @throws IOException
*/
private synchronized AuxServiceRecords maybeReadManifestFile() throws
IOException {
if (manifest == null) {
return null;
}
if (!manifestFS.exists(manifest)) {
LOG.warn("Manifest file " + manifest + " doesn't exist");
return null;
}
FileStatus status;
try {
status = manifestFS.getFileStatus(manifest);
} catch (FileNotFoundException e) {
LOG.warn("Manifest file " + manifest + " doesn't exist");
return null;
}
if (!status.isFile()) {
LOG.warn("Manifest file " + manifest + " is not a file");
}
if (!checkManifestOwnerAndPermissions(status)) {
return null;
}
if (status.getModificationTime() == manifestModifyTS) {
return null;
}
manifestModifyTS = status.getModificationTime();
LOG.info("Reading auxiliary services manifest " + manifest);
try (FSDataInputStream in = manifestFS.open(manifest)) {
return mapper.readValue((InputStream) in, AuxServiceRecords.class);
}
}
/**
* Updates current aux services based on changes found in the manifest.
*
* @param conf configuration
* @param startServices if true starts services, otherwise only inits services
* @throws IOException
*/
@VisibleForTesting
protected synchronized void loadManifest(Configuration conf, boolean
startServices) throws IOException {
if (!manifestEnabled) {
throw new IOException("Dynamic reloading is not enabled via " +
YarnConfiguration.NM_AUX_SERVICES_MANIFEST_ENABLED);
}
if (manifest == null) {
return;
}
if (!manifestFS.exists(manifest)) {
if (serviceMap.isEmpty()) {
return;
}
LOG.info("Manifest file " + manifest + " doesn't exist, stopping " +
"auxiliary services");
Set<String> servicesToRemove = new HashSet<>(serviceMap.keySet());
for (String sName : servicesToRemove) {
maybeRemoveAuxService(sName);
}
return;
}
AuxServiceRecords services = maybeReadManifestFile();
loadServices(services, conf, startServices);
}
/**
* Updates current aux services based on changes found in the service list.
*
* @param services list of auxiliary services
* @param conf configuration
* @param startServices if true starts services, otherwise only inits services
* @throws IOException
*/
private synchronized void loadServices(AuxServiceRecords services,
Configuration conf, boolean startServices) throws IOException {
if (services == null) {
// read did not occur or no changes detected
return;
}
Set<String> loadedAuxServices = new HashSet<>();
boolean foundChanges = false;
if (services.getServices() != null) {
for (AuxServiceRecord service : services.getServices()) {
AuxServiceRecord existingService = serviceRecordMap.get(service
.getName());
loadedAuxServices.add(service.getName());
if (existingService != null && existingService.equals(service)) {
LOG.debug("Auxiliary service already loaded: {}", service.getName());
continue;
}
foundChanges = true;
try {
// stop aux service
maybeRemoveAuxService(service.getName());
// init aux service
AuxiliaryService s = initAuxService(service, conf, false);
if (startServices) {
// start aux service
startAuxService(service.getName(), s, service);
}
// add aux service to serviceMap
addService(service.getName(), s, service);
} catch (IOException e) {
LOG.error("Failed to load auxiliary service " + service.getName());
}
}
}
// remove aux services that do not appear in the new list
Set<String> servicesToRemove = new HashSet<>(serviceMap.keySet());
servicesToRemove.removeAll(loadedAuxServices);
for (String sName : servicesToRemove) {
foundChanges = true;
maybeRemoveAuxService(sName);
}
if (!foundChanges) {
LOG.info("No auxiliary services changes detected");
}
}
private static String getClassName(AuxServiceRecord service) {
AuxServiceConfiguration serviceConf = service.getConfiguration();
if (serviceConf == null) {
return null;
}
return serviceConf.getProperty(CLASS_NAME);
}
private static String[] getSystemClasses(AuxServiceRecord service) {
AuxServiceConfiguration serviceConf = service.getConfiguration();
if (serviceConf == null || serviceConf.getProperty(SYSTEM_CLASSES) == null) {
return new String[]{};
}
return StringUtils.split(serviceConf.getProperty(SYSTEM_CLASSES));
}
/**
* Translates an aux service specified in the Configuration to an aux
* service record.
*
* @param sName aux service name
* @param conf configuration
* @return
*/
private static AuxServiceRecord createServiceRecordFromConfiguration(String
sName, Configuration conf) {
String className = conf.get(String.format(
YarnConfiguration.NM_AUX_SERVICE_FMT, sName));
String remoteClassPath = conf.get(String.format(
YarnConfiguration.NM_AUX_SERVICE_REMOTE_CLASSPATH, sName));
String[] systemClasses = conf.getTrimmedStrings(String.format(
YarnConfiguration.NM_AUX_SERVICES_SYSTEM_CLASSES, sName));
AuxServiceConfiguration serviceConf = new AuxServiceConfiguration();
if (className != null) {
serviceConf.setProperty(CLASS_NAME, className);
}
if (systemClasses != null) {
serviceConf.setProperty(SYSTEM_CLASSES, StringUtils.join(",",
systemClasses));
}
if (remoteClassPath != null) {
AuxServiceFile.TypeEnum type;
String lcClassPath = StringUtils.toLowerCase(remoteClassPath);
if (lcClassPath.endsWith(".jar")) {
type = AuxServiceFile.TypeEnum.STATIC;
} else if (lcClassPath.endsWith(".zip") ||
lcClassPath.endsWith(".tar.gz") || lcClassPath.endsWith(".tgz") ||
lcClassPath.endsWith(".tar")) {
type = AuxServiceFile.TypeEnum.ARCHIVE;
} else {
throw new YarnRuntimeException("Cannot unpack file from " +
"remote-file-path:" + remoteClassPath + "for aux-service:" +
sName + ".\n");
}
AuxServiceFile file = new AuxServiceFile().srcFile(remoteClassPath)
.type(type);
serviceConf.getFiles().add(file);
}
return new AuxServiceRecord().name(sName).configuration(serviceConf);
}
@Override
public synchronized void serviceInit(Configuration conf) throws Exception {
boolean recoveryEnabled = conf.getBoolean(
YarnConfiguration.NM_RECOVERY_ENABLED,
YarnConfiguration.DEFAULT_NM_RECOVERY_ENABLED);
if (recoveryEnabled) {
stateStoreRoot = new Path(conf.get(YarnConfiguration.NM_RECOVERY_DIR),
STATE_STORE_ROOT_NAME);
stateStoreFs = FileSystem.getLocal(conf);
}
manifestEnabled = conf.getBoolean(
YarnConfiguration.NM_AUX_SERVICES_MANIFEST_ENABLED,
YarnConfiguration.DEFAULT_NM_AUX_SERVICES_MANIFEST_ENABLED);
if (!manifestEnabled) {
Collection<String> auxNames = conf.getStringCollection(
YarnConfiguration.NM_AUX_SERVICES);
for (final String sName : auxNames) {
AuxServiceRecord service = createServiceRecordFromConfiguration(sName,
conf);
maybeRemoveAuxService(sName);
AuxiliaryService s = initAuxService(service, conf, true);
addService(sName, s, service);
}
} else {
String manifestStr = conf.get(YarnConfiguration.NM_AUX_SERVICES_MANIFEST);
if (manifestStr != null) {
manifest = new Path(manifestStr);
manifestFS = FileSystem.get(new URI(manifestStr), conf);
loadManifest(conf, false);
manifestReloadInterval = conf.getLong(
YarnConfiguration.NM_AUX_SERVICES_MANIFEST_RELOAD_MS,
YarnConfiguration.DEFAULT_NM_AUX_SERVICES_MANIFEST_RELOAD_MS);
manifestReloadTask = new ManifestReloadTask();
} else {
LOG.info("Auxiliary services manifest is enabled, but no manifest " +
"file is specified in the configuration.");
}
}
super.serviceInit(conf);
}
private void startAuxService(String name, AuxiliaryService service,
AuxServiceRecord serviceRecord) {
service.start();
service.registerServiceListener(this);
ByteBuffer meta = service.getMetaData();
if (meta != null) {
serviceMetaData.put(name, meta);
}
serviceRecord.setLaunchTime(new Date());
}
private void stopAuxService(Service service) {
if (service.getServiceState() == Service.STATE.STARTED) {
service.unregisterServiceListener(this);
service.stop();
}
}
@Override
public synchronized void serviceStart() throws Exception {
// TODO fork(?) services running as configured user
// monitor for health, shutdown/restart(?) if any should die
for (Map.Entry<String, AuxiliaryService> entry : serviceMap.entrySet()) {
AuxiliaryService service = entry.getValue();
String name = entry.getKey();
startAuxService(name, service, serviceRecordMap.get(name));
}
if (manifestEnabled && manifest != null && manifestReloadInterval > 0) {
LOG.info("Scheduling reloading auxiliary services manifest file at " +
"interval " + manifestReloadInterval + " ms");
manifestReloadTimer = new Timer("AuxServicesManifestReload-Timer",
true);
manifestReloadTimer.schedule(manifestReloadTask,
manifestReloadInterval, manifestReloadInterval);
}
super.serviceStart();
}
@Override
public synchronized void serviceStop() throws Exception {
try {
for (Service service : serviceMap.values()) {
stopAuxService(service);
}
serviceMap.clear();
serviceRecordMap.clear();
serviceMetaData.clear();
if (manifestFS != null) {
manifestFS.close();
}
if (manifestReloadTimer != null) {
manifestReloadTimer.cancel();
}
} finally {
super.serviceStop();
}
}
@Override
public void stateChanged(Service service) {
// services changing state is expected on reload
LOG.info("Service " + service.getName() + " changed state: " +
service.getServiceState());
}
@Override
public void handle(AuxServicesEvent event) {
LOG.info("Got event " + event.getType() + " for appId "
+ event.getApplicationID());
switch (event.getType()) {
case APPLICATION_INIT:
LOG.info("Got APPLICATION_INIT for service " + event.getServiceID());
AuxiliaryService service = null;
try {
service = serviceMap.get(event.getServiceID());
service
.initializeApplication(new ApplicationInitializationContext(event
.getUser(), event.getApplicationID(), event.getServiceData()));
} catch (Throwable th) {
logWarningWhenAuxServiceThrowExceptions(service,
AuxServicesEventType.APPLICATION_INIT, th);
}
break;
case APPLICATION_STOP:
for (AuxiliaryService serv : serviceMap.values()) {
try {
serv.stopApplication(new ApplicationTerminationContext(event
.getApplicationID()));
} catch (Throwable th) {
logWarningWhenAuxServiceThrowExceptions(serv,
AuxServicesEventType.APPLICATION_STOP, th);
}
}
break;
case CONTAINER_INIT:
for (AuxiliaryService serv : serviceMap.values()) {
try {
serv.initializeContainer(new ContainerInitializationContext(
event.getContainer().getUser(),
event.getContainer().getContainerId(),
event.getContainer().getResource(), event.getContainer()
.getContainerTokenIdentifier().getContainerType()));
} catch (Throwable th) {
logWarningWhenAuxServiceThrowExceptions(serv,
AuxServicesEventType.CONTAINER_INIT, th);
}
}
break;
case CONTAINER_STOP:
for (AuxiliaryService serv : serviceMap.values()) {
try {
serv.stopContainer(new ContainerTerminationContext(
event.getUser(), event.getContainer().getContainerId(),
event.getContainer().getResource(), event.getContainer()
.getContainerTokenIdentifier().getContainerType()));
} catch (Throwable th) {
logWarningWhenAuxServiceThrowExceptions(serv,
AuxServicesEventType.CONTAINER_STOP, th);
}
}
break;
default:
throw new RuntimeException("Unknown type: " + event.getType());
}
}
private boolean validateAuxServiceName(String name) {
if (name == null || name.trim().isEmpty()) {
return false;
}
return p.matcher(name).matches();
}
private void logWarningWhenAuxServiceThrowExceptions(AuxiliaryService service,
AuxServicesEventType eventType, Throwable th) {
LOG.warn((null == service ? "The auxService is null"
: "The auxService name is " + service.getName())
+ " and it got an error at event: " + eventType, th);
}
FileContext getLocalFileContext(Configuration conf) {
try {
return FileContext.getLocalFSFileContext(conf);
} catch (IOException e) {
throw new YarnRuntimeException("Failed to access local fs");
}
}
FileContext getRemoteFileContext(final URI path, Configuration conf) {
try {
return FileContext.getFileContext(path, conf);
} catch (IOException e) {
throw new YarnRuntimeException("Failed to access remote fs");
}
}
private UserGroupInformation getRemoteUgi() {
UserGroupInformation remoteUgi;
try {
remoteUgi = UserGroupInformation.getCurrentUser();
} catch (IOException e) {
String msg = "Cannot obtain the user-name. Got exception: "
+ StringUtils.stringifyException(e);
LOG.warn(msg);
throw new YarnRuntimeException(msg);
}
return remoteUgi;
}
protected static AuxServiceRecord newAuxService(String name, String
className) {
AuxServiceConfiguration serviceConf = new AuxServiceConfiguration();
serviceConf.setProperty(CLASS_NAME, className);
return new AuxServiceRecord().name(name).configuration(serviceConf);
}
protected static void setClasspath(AuxServiceRecord service, String
classpath) {
service.getConfiguration().getFiles().add(new AuxServiceFile()
.srcFile(classpath).type(AuxServiceFile.TypeEnum.STATIC));
}
protected static void setSystemClasses(AuxServiceRecord service, String
systemClasses) {
service.getConfiguration().setProperty(SYSTEM_CLASSES, systemClasses);
}
protected FileSystem getManifestFS() {
return manifestFS;
}
/**
* Class which is used by the {@link Timer} | loaded |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInfoAMoreComplexOverloadedTest.java | {
"start": 1253,
"end": 3094
} | class ____ extends ContextTestSupport {
@Test
public void testRequestA() {
BeanInfo beanInfo = new BeanInfo(context, Bean.class);
Message message = new DefaultMessage(context);
message.setBody(new RequestA());
Exchange exchange = new DefaultExchange(context);
exchange.setIn(message);
MethodInvocation methodInvocation = beanInfo.createInvocation(new Bean(), exchange);
Method method = methodInvocation.getMethod();
assertEquals("doSomething", method.getName());
assertEquals(RequestA.class, method.getGenericParameterTypes()[0]);
}
@Test
public void testRequestB() {
BeanInfo beanInfo = new BeanInfo(context, Bean.class);
Message message = new DefaultMessage(context);
message.setBody(new RequestB());
Exchange exchange = new DefaultExchange(context);
exchange.setIn(message);
MethodInvocation methodInvocation = beanInfo.createInvocation(new Bean(), exchange);
Method method = methodInvocation.getMethod();
assertEquals("doSomething", method.getName());
assertEquals(RequestB.class, method.getGenericParameterTypes()[0]);
}
@Test
public void testAmbigious() {
BeanInfo beanInfo = new BeanInfo(context, Bean.class);
Message message = new DefaultMessage(context);
message.setBody("Hello World");
Exchange exchange = new DefaultExchange(context);
exchange.setIn(message);
AmbiguousMethodCallException e = assertThrows(AmbiguousMethodCallException.class,
() -> beanInfo.createInvocation(new Bean(), exchange),
"Should have thrown an exception");
assertEquals(2, e.getMethods().size());
}
@SuppressWarnings("Unused")
static | BeanInfoAMoreComplexOverloadedTest |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/tags/form/RadioButtonTagTests.java | {
"start": 1397,
"end": 9281
} | class ____ extends AbstractFormTagTests {
private RadioButtonTag tag;
private TestBean bean;
@Override
@SuppressWarnings("serial")
protected void onSetUp() {
this.tag = new RadioButtonTag() {
@Override
protected TagWriter createTagWriter() {
return new TagWriter(getWriter());
}
};
this.tag.setPageContext(getPageContext());
}
@Test
void withCheckedValue() throws Exception {
String dynamicAttribute1 = "attr1";
String dynamicAttribute2 = "attr2";
this.tag.setPath("sex");
this.tag.setValue("M");
this.tag.setDynamicAttribute(null, dynamicAttribute1, dynamicAttribute1);
this.tag.setDynamicAttribute(null, dynamicAttribute2, dynamicAttribute2);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
assertTagOpened(output);
assertTagClosed(output);
assertContainsAttribute(output, "name", "sex");
assertContainsAttribute(output, "type", "radio");
assertContainsAttribute(output, "value", "M");
assertContainsAttribute(output, "checked", "checked");
assertContainsAttribute(output, dynamicAttribute1, dynamicAttribute1);
assertContainsAttribute(output, dynamicAttribute2, dynamicAttribute2);
}
@Test
void withCheckedValueAndDynamicAttributes() throws Exception {
this.tag.setPath("sex");
this.tag.setValue("M");
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
assertTagOpened(output);
assertTagClosed(output);
assertContainsAttribute(output, "name", "sex");
assertContainsAttribute(output, "type", "radio");
assertContainsAttribute(output, "value", "M");
assertContainsAttribute(output, "checked", "checked");
}
@Test
void withCheckedObjectValue() throws Exception {
this.tag.setPath("myFloat");
this.tag.setValue(getFloat());
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
assertTagOpened(output);
assertTagClosed(output);
assertContainsAttribute(output, "name", "myFloat");
assertContainsAttribute(output, "type", "radio");
assertContainsAttribute(output, "value", getFloat().toString());
assertContainsAttribute(output, "checked", "checked");
}
@Test
void withCheckedObjectValueAndEditor() throws Exception {
this.tag.setPath("myFloat");
this.tag.setValue("F12.99");
BeanPropertyBindingResult bindingResult = new BeanPropertyBindingResult(this.bean, COMMAND_NAME);
MyFloatEditor editor = new MyFloatEditor();
bindingResult.getPropertyEditorRegistry().registerCustomEditor(Float.class, editor);
getPageContext().getRequest().setAttribute(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, bindingResult);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
assertTagOpened(output);
assertTagClosed(output);
assertContainsAttribute(output, "name", "myFloat");
assertContainsAttribute(output, "type", "radio");
assertContainsAttribute(output, "value", "F" + getFloat());
assertContainsAttribute(output, "checked", "checked");
}
@Test
void withUncheckedObjectValue() throws Exception {
Float value = Float.valueOf("99.45");
this.tag.setPath("myFloat");
this.tag.setValue(value);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
assertTagOpened(output);
assertTagClosed(output);
assertContainsAttribute(output, "name", "myFloat");
assertContainsAttribute(output, "type", "radio");
assertContainsAttribute(output, "value", value.toString());
assertAttributeNotPresent(output, "checked");
}
@Test
void withUncheckedValue() throws Exception {
this.tag.setPath("sex");
this.tag.setValue("F");
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
assertTagOpened(output);
assertTagClosed(output);
assertContainsAttribute(output, "name", "sex");
assertContainsAttribute(output, "type", "radio");
assertContainsAttribute(output, "value", "F");
assertAttributeNotPresent(output, "checked");
}
@Test
void collectionOfPets() throws Exception {
this.tag.setPath("pets");
this.tag.setValue(new Pet("Rudiger"));
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element checkboxElement = document.getRootElement().elements().get(0);
assertThat(checkboxElement.getName()).isEqualTo("input");
assertThat(checkboxElement.attribute("type").getValue()).isEqualTo("radio");
assertThat(checkboxElement.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement.attribute("value").getValue()).isEqualTo("Rudiger");
assertThat(checkboxElement.attribute("checked").getValue()).isEqualTo("checked");
}
@Test
void collectionOfPetsNotSelected() throws Exception {
this.tag.setPath("pets");
this.tag.setValue(new Pet("Santa's Little Helper"));
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element checkboxElement = document.getRootElement().elements().get(0);
assertThat(checkboxElement.getName()).isEqualTo("input");
assertThat(checkboxElement.attribute("type").getValue()).isEqualTo("radio");
assertThat(checkboxElement.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement.attribute("value").getValue()).isEqualTo("Santa's Little Helper");
assertThat(checkboxElement.attribute("checked")).isNull();
}
@Test
void collectionOfPetsWithEditor() throws Exception {
this.tag.setPath("pets");
this.tag.setValue(new ItemPet("Rudiger"));
BeanPropertyBindingResult bindingResult = new BeanPropertyBindingResult(this.bean, COMMAND_NAME);
PropertyEditorSupport editor = new ItemPet.CustomEditor();
bindingResult.getPropertyEditorRegistry().registerCustomEditor(ItemPet.class, editor);
getPageContext().getRequest().setAttribute(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, bindingResult);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element checkboxElement = document.getRootElement().elements().get(0);
assertThat(checkboxElement.getName()).isEqualTo("input");
assertThat(checkboxElement.attribute("type").getValue()).isEqualTo("radio");
assertThat(checkboxElement.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement.attribute("value").getValue()).isEqualTo("Rudiger");
assertThat(checkboxElement.attribute("checked").getValue()).isEqualTo("checked");
}
@Test
void dynamicTypeAttribute() {
assertThatIllegalArgumentException().isThrownBy(() ->
this.tag.setDynamicAttribute(null, "type", "email"))
.withMessage("Attribute type=\"email\" is not allowed");
}
private void assertTagOpened(String output) {
assertThat(output).contains("<input ");
}
private void assertTagClosed(String output) {
assertThat(output).contains("/>");
}
private Float getFloat() {
return Float.valueOf("12.99");
}
@Override
protected TestBean createTestBean() {
this.bean = new TestBean();
bean.setSex("M");
bean.setMyFloat(getFloat());
bean.setPets(Collections.singletonList(new Pet("Rudiger")));
return bean;
}
private static | RadioButtonTagTests |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/builder/BuilderSupport.java | {
"start": 1546,
"end": 18613
} | class ____ implements CamelContextAware {
private CamelContext camelContext;
private ErrorHandlerFactory errorHandlerFactory;
protected BuilderSupport() {
}
protected BuilderSupport(CamelContext context) {
this.camelContext = context;
}
// Builder methods
// -------------------------------------------------------------------------
/**
* Returns a value builder for the given expression
*/
public ValueBuilder expression(Expression exp) {
return new ValueBuilder(exp);
}
/**
* Returns a value builder for the given header
*/
public ValueBuilder header(String name) {
return Builder.header(name);
}
/**
* Returns a value builder for the given exchange property
*/
public ValueBuilder exchangeProperty(String name) {
return Builder.exchangeProperty(name);
}
/**
* Returns a predicate and value builder for the inbound body on an exchange
*/
public ValueBuilder body() {
return Builder.body();
}
/**
* Returns a predicate and value builder for the inbound message body as a specific type
*/
public <T> ValueBuilder bodyAs(Class<T> type) {
return Builder.bodyAs(type);
}
/**
* Returns a value builder for the given variable
*/
public ValueBuilder variable(String name) {
return Builder.variable(name);
}
/**
* Returns a value builder for the given system property
*/
public ValueBuilder systemProperty(String name) {
return Builder.systemProperty(name);
}
/**
* Returns a value builder for the given system property
*/
public ValueBuilder systemProperty(String name, String defaultValue) {
return Builder.systemProperty(name, defaultValue);
}
/**
* Returns a constant expression value builder
*/
public ValueBuilder constant(Object value) {
return Builder.constant(value);
}
/**
* Returns a constant expression value builder
*/
public ValueBuilder constant(Object value, boolean trim) {
return Builder.constant(value, trim);
}
/**
* Returns a constant expression value builder
*/
public ValueBuilder constant(Object... value) {
return Builder.constant(value);
}
/**
* Returns a JOOR expression value builder
*
* @deprecated use java instead
*/
@Deprecated(since = "4.3.0")
public ValueBuilder joor(String value) {
return Builder.joor(value);
}
/**
* Returns a JOOR expression value builder
*
* @deprecated use java instead
*/
@Deprecated(since = "4.3.0")
public ValueBuilder joor(String value, Class<?> resultType) {
return Builder.joor(value, resultType);
}
/**
* Returns a Java expression value builder
*/
public ValueBuilder java(String value) {
return Builder.java(value);
}
/**
* Returns a Java expression value builder
*/
public ValueBuilder java(String value, Class<?> resultType) {
return Builder.java(value, resultType);
}
/**
* Returns a JSonPath expression value builder
*/
public ValueBuilder jsonpath(String value) {
return Builder.jsonpath(value);
}
/**
* Returns a JSonPath expression value builder
*
* @param value The JSonPath expression
* @param resultType The result type that the JSonPath expression will return.
*/
public ValueBuilder jsonpath(String value, Class<?> resultType) {
return Builder.jsonpath(value, resultType);
}
/**
* Returns a JQ expression value builder
*/
public ValueBuilder jq(String value) {
return Builder.jq(value);
}
/**
* Returns a JQ expression value builder
*/
public ValueBuilder jq(String value, Class<?> resultType) {
return Builder.jq(value, resultType);
}
/**
* Returns a compiled simple expression value builder
*/
public ValueBuilder csimple(String value) {
return Builder.csimple(value);
}
/**
* Returns a compiled simple expression value builder
*/
public ValueBuilder csimple(String value, Class<?> resultType) {
return Builder.csimple(value, resultType);
}
/**
* Returns a datasonnet expression value builder
*/
public ValueBuilder datasonnet(String value) {
return datasonnet(value, null);
}
/**
* Returns a datasonnet expression value builder
*/
public ValueBuilder datasonnet(String value, Class<?> resultType) {
return datasonnet(value, resultType, null, null);
}
/**
* Returns a datasonnet expression value builder
*/
public ValueBuilder datasonnet(String value, Class<?> resultType, String bodyMediaType, String outputMediaType) {
DatasonnetExpression exp = new DatasonnetExpression(value);
exp.setResultType(resultType);
exp.setBodyMediaType(bodyMediaType);
exp.setOutputMediaType(outputMediaType);
return new ValueBuilder(exp);
}
/**
* Returns a simple expression value builder
*/
public ValueBuilder simple(String value) {
return simple(value, null);
}
/**
* Returns a simple expression value builder
*/
public ValueBuilder simple(String value, Class<?> resultType) {
return Builder.simple(value, resultType);
}
/**
* Returns a simple expression value builder, using String.format style
*/
public ValueBuilder simpleF(String format, Object... values) {
String exp = String.format(format, values);
return simple(exp);
}
/**
* Returns a simple expression value builder, using String.format style
*/
public ValueBuilder simpleF(String format, Class<?> resultType, Object... values) {
String exp = String.format(format, values);
return simple(exp, resultType);
}
/**
* Returns a xpath expression value builder
*
* @param value the XPath expression
* @return the builder
*/
public ValueBuilder xpath(String value) {
return xpath(value, null, null);
}
/**
* Returns a xpath expression value builder
*
* @param value the XPath expression
* @param resultType the result type that the XPath expression will return.
* @return the builder
*/
public ValueBuilder xpath(String value, Class<?> resultType) {
return xpath(value, resultType, null);
}
/**
* Returns a xpath expression value builder
*
* @param value the XPath expression
* @param namespaces namespace mappings
* @return the builder
*/
public ValueBuilder xpath(String value, Namespaces namespaces) {
return xpath(value, null, namespaces);
}
/**
* Returns a xpath expression value builder
*
* @param value the XPath expression
* @param resultType the result type that the XPath expression will return.
* @param namespaces namespace mappings
* @return the builder
*/
public ValueBuilder xpath(String value, Class<?> resultType, Namespaces namespaces) {
// the value may contain property placeholders as it may be used
// directly from Java DSL
try {
value = getContext().resolvePropertyPlaceholders(value);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
XPathExpression exp = new XPathExpression(value);
exp.setResultType(resultType);
if (namespaces != null) {
exp.setNamespaces(namespaces.getNamespaces());
}
return new ValueBuilder(exp);
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
* <p/>
* This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
*
* @param beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
* @return the builder
*/
public ValueBuilder method(Object beanOrBeanRef) {
return method(beanOrBeanRef, null);
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
* <p/>
* This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
*
* @param beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
* @param method name of method to invoke
* @return the builder
*/
public ValueBuilder method(Object beanOrBeanRef, String method) {
return Builder.method(beanOrBeanRef, method);
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
*
* @param beanType the Class of the bean which we want to invoke
* @return the builder
*/
public ValueBuilder method(Class<?> beanType) {
return Builder.method(beanType);
}
/**
* Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
*
* @param beanType the Class of the bean which we want to invoke
* @param method name of method to invoke
* @return the builder
*/
public ValueBuilder method(Class<?> beanType, String method) {
return Builder.method(beanType, method);
}
/**
* Returns an expression value builder that replaces all occurrences of the regular expression with the given
* replacement
*/
public ValueBuilder regexReplaceAll(Expression content, String regex, String replacement) {
return Builder.regexReplaceAll(content, regex, replacement);
}
/**
* Returns an expression value builder that replaces all occurrences of the regular expression with the given
* replacement
*/
public ValueBuilder regexReplaceAll(Expression content, String regex, Expression replacement) {
return Builder.regexReplaceAll(content, regex, replacement);
}
/**
* Returns a exception expression value builder
*/
public ValueBuilder exceptionMessage() {
return Builder.exceptionMessage();
}
/**
 * Looks up the endpoint for the given URI in the Camel context.
 *
 * @param  uri                     the endpoint URI to resolve; must not be null
 * @return                         the resolved endpoint, never null
 * @throws NoSuchEndpointException if no endpoint exists for the given URI
 */
public Endpoint endpoint(String uri) throws NoSuchEndpointException {
    ObjectHelper.notNull(uri, "uri");
    final Endpoint resolved = getContext().getEndpoint(uri);
    if (resolved != null) {
        return resolved;
    }
    throw new NoSuchEndpointException(uri);
}
/**
 * Looks up the endpoint for the given URI in the Camel context, expecting it to be of the
 * specified type.
 *
 * @param  uri                     the endpoint URI to resolve; must not be null
 * @param  type                    the expected type of the endpoint
 * @return                         the resolved endpoint, never null
 * @throws NoSuchEndpointException if no endpoint of the given type exists for the URI
 */
public <T extends Endpoint> T endpoint(String uri, Class<T> type) throws NoSuchEndpointException {
    ObjectHelper.notNull(uri, "uri");
    final T resolved = getContext().getEndpoint(uri, type);
    if (resolved != null) {
        return resolved;
    }
    throw new NoSuchEndpointException(uri);
}
/**
* Creates a default <a href="http://camel.apache.org/error-handler.html">error handler</a>.
*
* @return the builder
*/
public DefaultErrorHandlerBuilder defaultErrorHandler() {
return new DefaultErrorHandlerBuilder();
}
/**
* Creates a disabled <a href="http://camel.apache.org/error-handler.html">error handler</a> for removing the
* default error handler
*
* @return the builder
*/
public NoErrorHandlerBuilder noErrorHandler() {
return new NoErrorHandlerBuilder();
}
/**
 * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel EIP:</a> is a error handler for
 * handling messages that could not be delivered to it's intended destination.
 *
 * @param deadLetterUri uri to the dead letter endpoint storing dead messages
 * @return the builder
 */
public DeadLetterChannelBuilder deadLetterChannel(String deadLetterUri) {
    final DeadLetterChannelBuilder builder = new DeadLetterChannelBuilder();
    builder.setDeadLetterUri(deadLetterUri);
    return builder;
}
/**
* <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel EIP:</a> is a error handler for
* handling messages that could not be delivered to it's intended destination.
*
* @param deadLetterEndpoint dead letter endpoint storing dead messages
* @return the builder
*/
public DeadLetterChannelBuilder deadLetterChannel(Endpoint deadLetterEndpoint) {
return deadLetterChannel(deadLetterEndpoint.getEndpointUri());
}
/**
* Error handler using JTA transactions (requires camel-jta).
*
* @return the builder
*/
public JtaTransactionErrorHandlerBuilder jtaTransactionErrorHandler() {
return new JtaTransactionErrorHandlerBuilder();
}
/**
* Error handler using JTA transactions (requires camel-jta).
*
* @param policy the transaction policy
* @return the builder
*/
public JtaTransactionErrorHandlerBuilder jtaTransactionErrorHandler(TransactedPolicy policy) {
JtaTransactionErrorHandlerBuilder answer = new JtaTransactionErrorHandlerBuilder();
answer.setTransactedPolicy(policy);
return answer;
}
/**
* Error handler using JTA transactions (requires camel-jta).
*
* @param policyRef references to the transaction policy
* @return the builder
*/
public JtaTransactionErrorHandlerBuilder jtaTransactionErrorHandler(String policyRef) {
JtaTransactionErrorHandlerBuilder answer = new JtaTransactionErrorHandlerBuilder();
answer.setTransactedPolicyRef(policyRef);
return answer;
}
/**
* Error handler using Spring transactions (requires camel-spring).
*
* @return the builder
*/
public SpringTransactionErrorHandlerBuilder springTransactionErrorHandler() {
return new SpringTransactionErrorHandlerBuilder();
}
/**
* Error handler using Spring transactions (requires camel-spring).
*
* @param policy the transaction policy
* @return the builder
*/
public SpringTransactionErrorHandlerBuilder springTransactionErrorHandler(TransactedPolicy policy) {
SpringTransactionErrorHandlerBuilder answer = new SpringTransactionErrorHandlerBuilder();
answer.setTransactedPolicy(policy);
return answer;
}
/**
* Error handler using Spring transactions (requires camel-spring).
*
* @param policyRef references to the transaction policy
* @return the builder
*/
public SpringTransactionErrorHandlerBuilder springTransactionErrorHandler(String policyRef) {
SpringTransactionErrorHandlerBuilder answer = new SpringTransactionErrorHandlerBuilder();
answer.setTransactedPolicyRef(policyRef);
return answer;
}
// Properties
// -------------------------------------------------------------------------
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
if (camelContext != null) {
this.camelContext = camelContext;
}
}
/**
* Get the {@link CamelContext}
*
* @return camelContext the Camel context
*/
public CamelContext getContext() {
return getCamelContext();
}
/**
 * Returns the error handler factory, lazily creating the default factory on first access.
 */
public ErrorHandlerFactory getErrorHandlerFactory() {
    if (hasErrorHandlerFactory()) {
        return errorHandlerFactory;
    }
    errorHandlerFactory = createErrorHandlerBuilder();
    return errorHandlerFactory;
}
protected ErrorHandlerFactory createErrorHandlerBuilder() {
return new DefaultErrorHandlerBuilder();
}
/**
* Sets the error handler to use with processors created by this builder
*/
public void setErrorHandlerFactory(ErrorHandlerFactory errorHandlerFactory) {
this.errorHandlerFactory = errorHandlerFactory;
}
/**
*
* @return true if an error handler factory was initialized
*/
public boolean hasErrorHandlerFactory() {
return this.errorHandlerFactory != null;
}
}
| BuilderSupport |
java | greenrobot__greendao | tests/DaoTestBase/src/main/java/org/greenrobot/greendao/daotest2/dao/KeepEntityDao.java | {
"start": 526,
"end": 795
} | class ____ extends AbstractDao<KeepEntity, Long> {
public static final String TABLENAME = "KEEP_ENTITY";
/**
* Properties of entity KeepEntity.<br/>
* Can be used for QueryBuilder and for referencing column names.
*/
public static | KeepEntityDao |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/ContextEvents.java | {
"start": 931,
"end": 1045
} | enum ____ {
/**
* Boot event
*/
BOOT,
/**
* Start event
*/
START
}
| ContextEvents |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/usage/UsageService.java | {
"start": 862,
"end": 3627
} | class ____ {
private final Map<String, BaseRestHandler> handlers;
private final SearchUsageHolder searchUsageHolder;
private final CCSUsageTelemetry ccsUsageHolder;
private final CCSUsageTelemetry esqlUsageHolder;
public UsageService() {
this.handlers = new HashMap<>();
this.searchUsageHolder = new SearchUsageHolder();
this.ccsUsageHolder = new CCSUsageTelemetry();
this.esqlUsageHolder = new CCSUsageTelemetry(false);
}
/**
 * Add a REST handler to this service.
 * <p>
 * Handlers are registered once per route they serve, so re-registering the exact same
 * instance under its name is a no-op. Registering a <em>different</em> handler instance
 * under an already-used name is a programming error and is rejected without modifying
 * the registry (the original mapping is kept intact).
 *
 * @param handler the {@link BaseRestHandler} to add to the usage service.
 * @throws IllegalArgumentException if the handler has no name, or a different handler is
 *                                  already registered under the same name
 */
public void addRestHandler(BaseRestHandler handler) {
    Objects.requireNonNull(handler);
    final String name = handler.getName();
    if (name == null) {
        throw new IllegalArgumentException("handler of type [" + handler.getClass().getName() + "] does not have a name");
    }
    /*
     * Handlers will be registered multiple times, once for each route that the handler handles. This means that we will see handlers
     * multiple times, so we do not have a conflict if we are seeing the same instance multiple times. So, we only reject if a handler
     * with the same name was registered before, and it is not the same instance as before.
     *
     * putIfAbsent (rather than put) ensures that on a genuine conflict the previously
     * registered handler stays in place instead of being clobbered just before we throw.
     */
    final BaseRestHandler maybeHandler = handlers.putIfAbsent(name, handler);
    if (maybeHandler != null && maybeHandler != handler) {
        final String message = String.format(
            Locale.ROOT,
            "handler of type [%s] conflicts with handler of type [%s] as they both have the same name [%s]",
            handler.getClass().getName(),
            maybeHandler.getClass().getName(),
            handler.getName()
        );
        throw new IllegalArgumentException(message);
    }
}
/**
 * Returns a snapshot of the per-handler REST usage counts for this node. Only handlers
 * that have been invoked at least once are included.
 *
 * @return map of handler name to invocation count
 */
public Map<String, Long> getRestUsageStats() {
    final Map<String, Long> stats = new HashMap<>();
    for (BaseRestHandler handler : handlers.values()) {
        final long usageCount = handler.getUsageCount();
        if (usageCount > 0) {
            stats.put(handler.getName(), usageCount);
        }
    }
    return stats;
}
/**
* Returns the search usage holder
*/
public SearchUsageHolder getSearchUsageHolder() {
return searchUsageHolder;
}
public CCSUsageTelemetry getCcsUsageHolder() {
return ccsUsageHolder;
}
public CCSUsageTelemetry getEsqlUsageHolder() {
return esqlUsageHolder;
}
}
| UsageService |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/http/param/QueryTest.java | {
"start": 1055,
"end": 2786
} | class ____ {
@Test
void testInitParams() {
Map<String, String> parameters = new LinkedHashMap<String, String>();
parameters.put(CommonParams.NAMESPACE_ID, "namespace");
parameters.put(CommonParams.SERVICE_NAME, "service");
parameters.put(CommonParams.GROUP_NAME, "group");
parameters.put(CommonParams.CLUSTER_NAME, null);
parameters.put("ip", "1.1.1.1");
parameters.put("port", String.valueOf(9999));
parameters.put("weight", String.valueOf(1.0));
parameters.put("ephemeral", String.valueOf(true));
String excepted = "namespaceId=namespace&serviceName=service&groupName=group&ip=1.1.1.1&port=9999&weight=1.0&ephemeral=true";
Query actual = Query.newInstance().initParams(parameters);
assertEquals(excepted, actual.toQueryUrl());
assertEquals("namespace", actual.getValue(CommonParams.NAMESPACE_ID));
}
@Test
void testAddParams() throws Exception {
Query query = Query.newInstance().addParam("key-1", "value-1").addParam("key-2", "value-2");
String s1 = query.toQueryUrl();
String s2 =
"key-1=" + URLEncoder.encode("value-1", StandardCharsets.UTF_8.name()) + "&key-2=" + URLEncoder.encode("value-2",
StandardCharsets.UTF_8.name());
assertEquals(s2, s1);
assertEquals("value-1", query.getValue("key-1"));
}
@Test
void testClear() {
Query query = Query.newInstance().addParam("key-1", "value-1").addParam("key-2", "value-2");
assertFalse(query.isEmpty());
assertEquals("value-1", query.getValue("key-1"));
query.clear();
assertTrue(query.isEmpty());
}
}
| QueryTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java | {
"start": 26004,
"end": 30742
} | class ____ implements Predicate<PersistentTasksCustomMetadata.PersistentTask<?>> {
private volatile Exception exception;
private volatile String node = "";
private volatile String assignmentExplanation;
/**
 * Decides whether the wait for the persistent task to start is over.
 * <p>
 * Returns {@code true} when a terminal-enough outcome has been reached: the task
 * started (records the executor node), is awaiting a lazy node, was stopped while
 * starting, failed, or could not be assigned (records an exception for the caller to
 * surface). Returns {@code false} while the task should continue to be waited on.
 * Side effects are stored in the {@code exception}, {@code node} and
 * {@code assignmentExplanation} fields of the enclosing predicate.
 */
@Override
public boolean test(PersistentTasksCustomMetadata.PersistentTask<?> persistentTask) {
    // No task visible yet - keep waiting.
    if (persistentTask == null) {
        return false;
    }
    PersistentTasksCustomMetadata.Assignment assignment = persistentTask.getAssignment();
    // This means we are awaiting a new node to be spun up, ok to return back to the user to await node creation
    if (assignment != null && assignment.equals(JobNodeSelector.AWAITING_LAZY_ASSIGNMENT)) {
        return true;
    }
    // Placeholder used if the task state carries no reason of its own.
    String reason = "__unknown__";
    if (assignment != null
        && assignment.equals(PersistentTasksCustomMetadata.INITIAL_ASSIGNMENT) == false
        && assignment.isAssigned() == false) {
        assignmentExplanation = assignment.getExplanation();
        // Assignment failed due to primary shard check.
        // This is hopefully intermittent and we should allow another assignment attempt.
        if (assignmentExplanation.contains(PRIMARY_SHARDS_INACTIVE)) {
            return false;
        }
        exception = new ElasticsearchStatusException(
            "Could not start data frame analytics task, allocation explanation [{}]",
            RestStatus.TOO_MANY_REQUESTS,
            assignment.getExplanation()
        );
        return true;
    }
    DataFrameAnalyticsTaskState taskState = (DataFrameAnalyticsTaskState) persistentTask.getState();
    reason = taskState != null ? taskState.getReason() : reason;
    // A null task state is treated as STOPPED (i.e. not started yet).
    DataFrameAnalyticsState analyticsState = taskState == null ? DataFrameAnalyticsState.STOPPED : taskState.getState();
    switch (analyticsState) {
        case STARTED:
            node = persistentTask.getExecutorNode();
            return true;
        case STOPPING:
            exception = ExceptionsHelper.conflictStatusException("the task has been stopped while waiting to be started");
            return true;
        // The STARTING case here is expected to be incredibly short-lived, just occurring during the
        // time period when a job has successfully been assigned to a node but the request to update
        // its task state is still in-flight. (The long-lived STARTING case when a lazy node needs to
        // be added to the cluster to accommodate the job was dealt with higher up this method when the
        // magic AWAITING_LAZY_ASSIGNMENT assignment was checked for.)
        case STARTING:
        case STOPPED:
            return false;
        case FAILED:
        default:
            exception = ExceptionsHelper.serverError(
                "Unexpected task state [{}] {}while waiting to be started",
                analyticsState,
                reason == null ? "" : "with reason [" + reason + "] "
            );
            return true;
    }
}
}
/**
 * Removes the persistent task after its start could not be completed, and then reports
 * the original {@code exception} - not the outcome of the removal - to the listener.
 * If the removal itself fails, that failure is only logged and the original exception
 * is still surfaced, since it describes the root cause the caller needs to see.
 */
private void cancelAnalyticsStart(
    PersistentTasksCustomMetadata.PersistentTask<TaskParams> persistentTask,
    Exception exception,
    ActionListener<NodeAcknowledgedResponse> listener
) {
    persistentTasksService.sendRemoveRequest(
        persistentTask.getId(),
        MachineLearning.HARD_CODED_MACHINE_LEARNING_MASTER_NODE_TIMEOUT,
        new ActionListener<>() {
            @Override
            public void onResponse(PersistentTasksCustomMetadata.PersistentTask<?> task) {
                // We succeeded in cancelling the persistent task, but the
                // problem that caused us to cancel it is the overall result
                listener.onFailure(exception);
            }

            @Override
            public void onFailure(Exception e) {
                logger.error(
                    () -> format(
                        "[%s] Failed to cancel persistent task that could not be assigned due to [%s]",
                        persistentTask.getParams().getId(),
                        exception.getMessage()
                    ),
                    e
                );
                listener.onFailure(exception);
            }
        }
    );
}
public static | AnalyticsPredicate |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/runtime/src/main/java/io/quarkus/resteasy/runtime/standalone/ResteasyStandaloneRecorder.java | {
"start": 16223,
"end": 17255
} | class ____ implements BufferAllocator {
private final int bufferSize;
private ResteasyVertxAllocator(int bufferSize) {
this.bufferSize = bufferSize;
}
@Override
public ByteBuf allocateBuffer() {
return allocateBuffer(useDirect);
}
@Override
public ByteBuf allocateBuffer(boolean direct) {
return allocateBuffer(direct, bufferSize);
}
@Override
public ByteBuf allocateBuffer(int bufferSize) {
return allocateBuffer(useDirect, bufferSize);
}
@Override
public ByteBuf allocateBuffer(boolean direct, int bufferSize) {
if (direct) {
return PooledByteBufAllocator.DEFAULT.directBuffer(bufferSize);
} else {
return PooledByteBufAllocator.DEFAULT.heapBuffer(bufferSize);
}
}
@Override
public int getBufferSize() {
return bufferSize;
}
}
}
| ResteasyVertxAllocator |
java | google__dagger | javatests/artifacts/hilt-android/simple/deep-android-lib/src/main/java/dagger/hilt/android/simple/deep/DeepAndroidLib.java | {
"start": 952,
"end": 1248
} | interface ____ {
DeepAndroidLib getDeepAndroidInstance();
}
@Inject
public DeepAndroidLib() {}
public static DeepAndroidLib getInstance(Context context) {
return EntryPointAccessors.fromApplication(context, LibEntryPoint.class)
.getDeepAndroidInstance();
}
}
| LibEntryPoint |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/WeightedRandomPicker.java | {
"start": 1127,
"end": 1351
} | class ____ extends SubchannelPicker {
@VisibleForTesting
final List<WeightedChildPicker> weightedChildPickers;
private final ThreadSafeRandom random;
private final long totalWeight;
static final | WeightedRandomPicker |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NullableOptionalTest.java | {
"start": 1710,
"end": 2162
} | class ____ {
@Nullable
// BUG: Diagnostic contains:
private Optional<Object> foo;
}
""")
.doTest();
}
@Test
public void methodReturnsOptionalWithNullableAnnotation_showsError() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
import javax.annotation.Nullable;
final | Test |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java | {
"start": 1028,
"end": 2538
} | class ____ extends UnaryExpression implements EvaluatorMapper {
public DeepCopy(Source source, Expression child) {
super(source, child);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public String getWriteableName() {
throw new UnsupportedOperationException();
}
@Override
public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
    EvalOperator.ExpressionEvaluator.Factory childEval = toEvaluator.apply(child());
    // Wrap the child's evaluator: evaluate the child, deep-copy the resulting block, and
    // release the original immediately (try-with-resources) so only the copy escapes.
    return ctx -> new EvalOperator.ExpressionEvaluator() {
        private final EvalOperator.ExpressionEvaluator child = childEval.get(ctx);

        @Override
        public Block eval(Page page) {
            try (Block block = child.eval(page)) {
                return BlockUtils.deepCopyOf(block, ctx.blockFactory());
            }
        }

        @Override
        public long baseRamBytesUsed() {
            // No accounting for the wrapper itself; copies are tracked by the block factory.
            return 0;
        }

        @Override
        public void close() {
            Releasables.closeExpectNoException(child);
        }
    };
}
@Override
protected UnaryExpression replaceChild(Expression newChild) {
return new DeepCopy(source(), newChild);
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, DeepCopy::new, child());
}
}
| DeepCopy |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParams.java | {
"start": 526,
"end": 3535
} | class ____ {
private final List<QueryParam> params; // This matches the named or unnamed parameters specified in an EsqlQueryRequest.params
private final Map<String, QueryParam> nameToParam; // This matches the named parameters specified in an EsqlQueryRequest.params
private Map<Token, QueryParam> tokenToParam; // This is populated by EsqlParser, each parameter marker has an entry
private List<ParsingException> parsingErrors;
private final int paramsCount;
public QueryParams() {
this(null);
}
/**
 * Creates the parameter holder from the request's (possibly named) parameters. A null or
 * empty list yields an empty, immutable holder.
 */
public QueryParams(List<QueryParam> params) {
    this.tokenToParam = new HashMap<>();
    this.parsingErrors = new ArrayList<>();
    if (params == null || params.isEmpty()) {
        this.params = List.of();
        this.nameToParam = Map.of();
        this.paramsCount = 0;
    } else {
        this.paramsCount = params.size();
        final List<QueryParam> positional = new ArrayList<>(paramsCount);
        final Map<String, QueryParam> named = new HashMap<>(paramsCount);
        for (QueryParam param : params) {
            positional.add(param);
            final String key = param.name();
            if (key != null) {
                named.put(key, param);
            }
        }
        this.params = positional;
        this.nameToParam = Collections.unmodifiableMap(named);
    }
}
public int size() {
return this.paramsCount;
}
/**
 * Returns the positional parameter at the given <b>1-based</b> index, or {@code null}
 * when the index is out of range (zero, negative, or greater than the parameter count).
 */
public QueryParam get(int index) {
    return (index <= 0 || index > this.paramsCount) ? null : params.get(index - 1);
}
public Map<String, QueryParam> namedParams() {
return this.nameToParam;
}
public boolean contains(String paramName) {
return this.nameToParam.containsKey(paramName);
}
public QueryParam get(String paramName) {
return nameToParam.get(paramName);
}
public boolean contains(Token token) {
return this.tokenToParam.containsKey(token);
}
public QueryParam get(Token tokenLocation) {
return this.tokenToParam.get(tokenLocation);
}
public void addTokenParam(Token token, QueryParam param) {
this.tokenToParam.put(token, param);
}
public Iterator<ParsingException> parsingErrors() {
return this.parsingErrors.iterator();
}
public void addParsingError(ParsingException e) {
this.parsingErrors.add(e);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
QueryParams that = (QueryParams) o;
return paramsCount == that.paramsCount
&& params.equals(that.params)
&& nameToParam.equals(that.nameToParam)
&& tokenToParam.equals(that.tokenToParam)
&& parsingErrors.equals(that.parsingErrors);
}
@Override
public int hashCode() {
return Objects.hash(params, nameToParam, tokenToParam, parsingErrors, paramsCount);
}
}
| QueryParams |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/export/NotificationPublisherTests.java | {
"start": 7492,
"end": 7546
} | interface ____ {
void sendNotification();
}
}
| MyMBean |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/DebeziumSqlserverComponentBuilderFactory.java | {
"start": 69542,
"end": 75192
} | class ____ returns SourceInfo
* schema and struct.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default:
* io.debezium.connector.sqlserver.SqlServerSourceInfoStructMaker
* Group: sqlserver
*
* @param sourceinfoStructMaker the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder sourceinfoStructMaker(java.lang.String sourceinfoStructMaker) {
doSetProperty("sourceinfoStructMaker", sourceinfoStructMaker);
return this;
}
/**
* A delay period after the snapshot is completed and the streaming
* begins, given in milliseconds. Defaults to 0 ms.
*
* The option is a: <code>long</code> type.
*
* Default: 0ms
* Group: sqlserver
*
* @param streamingDelayMs the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder streamingDelayMs(long streamingDelayMs) {
doSetProperty("streamingDelayMs", streamingDelayMs);
return this;
}
/**
* Specifies the maximum number of rows that should be read in one go
* from each table while streaming. The connector will read the table
* contents in multiple batches of this size. Defaults to 0 which means
* no limit.
*
* The option is a: <code>int</code> type.
*
* Group: sqlserver
*
* @param streamingFetchSize the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder streamingFetchSize(int streamingFetchSize) {
doSetProperty("streamingFetchSize", streamingFetchSize);
return this;
}
/**
* A comma-separated list of regular expressions that match the
* fully-qualified names of tables to be excluded from monitoring.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*
* @param tableExcludeList the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder tableExcludeList(java.lang.String tableExcludeList) {
doSetProperty("tableExcludeList", tableExcludeList);
return this;
}
/**
* Flag specifying whether built-in tables should be ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: sqlserver
*
* @param tableIgnoreBuiltin the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder tableIgnoreBuiltin(boolean tableIgnoreBuiltin) {
doSetProperty("tableIgnoreBuiltin", tableIgnoreBuiltin);
return this;
}
/**
* The tables for which changes are to be captured.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: sqlserver
*
* @param tableIncludeList the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder tableIncludeList(java.lang.String tableIncludeList) {
doSetProperty("tableIncludeList", tableIncludeList);
return this;
}
/**
* Time, date, and timestamps can be represented with different kinds of
* precisions, including: 'adaptive' (the default) bases the precision
* of time, date, and timestamp values on the database column's
* precision; 'adaptive_time_microseconds' like 'adaptive' mode, but
* TIME fields always use microseconds precision; 'connect' always
* represents time, date, and timestamp values using Kafka Connect's
* built-in representations for Time, Date, and Timestamp, which uses
* millisecond precision regardless of the database columns' precision.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: adaptive
* Group: sqlserver
*
* @param timePrecisionMode the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder timePrecisionMode(java.lang.String timePrecisionMode) {
doSetProperty("timePrecisionMode", timePrecisionMode);
return this;
}
/**
* Whether delete operations should be represented by a delete event and
* a subsequent tombstone event (true) or only by a delete event
* (false). Emitting the tombstone event (the default behavior) allows
* Kafka to completely delete all events pertaining to the given key
* once the source record got deleted.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: sqlserver
*
* @param tombstonesOnDelete the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder tombstonesOnDelete(boolean tombstonesOnDelete) {
doSetProperty("tombstonesOnDelete", tombstonesOnDelete);
return this;
}
/**
* The name of the TopicNamingStrategy | that |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InvokerBuilder.java | {
"start": 6018,
"end": 6416
} | class ____
* declares the transformer</li>
* </ul>
*
* An input transformer must produce a type that can be consumed by the target method.
* Specifically: when {@code X} is <em>any-type</em>, it is not type checked during deployment.
* Otherwise, it is a deployment problem if {@code X} is not assignable to the corresponding type
* in the declaration of the target method (that is the bean | that |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/orca/OrcaOobUtilTest.java | {
"start": 38828,
"end": 40498
} | class ____ extends Subchannel {
final List<EquivalentAddressGroup> eagList;
final Attributes attrs;
final Channel channel;
final List<String> logs = new ArrayList<>();
final int index;
SubchannelStateListener stateListener;
private final ChannelLogger logger =
new ChannelLogger() {
@Override
public void log(ChannelLogLevel level, String msg) {
logs.add(level + ": " + msg);
}
@Override
public void log(ChannelLogLevel level, String template, Object... args) {
log(level, MessageFormat.format(template, args));
}
};
FakeSubchannel(int index, CreateSubchannelArgs args, Channel channel) {
this.index = index;
this.eagList = args.getAddresses();
this.attrs = args.getAttributes();
this.channel = checkNotNull(channel);
}
@Override
public void start(SubchannelStateListener listener) {
checkState(this.stateListener == null);
this.stateListener = listener;
}
@Override
public void shutdown() {
deliverSubchannelState(index, ConnectivityStateInfo.forNonError(SHUTDOWN));
}
@Override
public void requestConnection() {
throw new AssertionError("Should not be called");
}
@Override
public List<EquivalentAddressGroup> getAllAddresses() {
return eagList;
}
@Override
public Attributes getAttributes() {
return attrs;
}
@Override
public Channel asChannel() {
return channel;
}
@Override
public ChannelLogger getChannelLogger() {
return logger;
}
}
private final | FakeSubchannel |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/LinuxContainerRuntimeConstants.java | {
"start": 1374,
"end": 4682
} | enum ____ {
DEFAULT,
DOCKER,
JAVASANDBOX,
RUNC;
}
public static final Attribute<Map> LOCALIZED_RESOURCES = Attribute
.attribute(Map.class, "localized_resources");
public static final Attribute<List> CONTAINER_LAUNCH_PREFIX_COMMANDS =
Attribute.attribute(List.class, "container_launch_prefix_commands");
public static final Attribute<String> RUN_AS_USER =
Attribute.attribute(String.class, "run_as_user");
public static final Attribute<String> USER = Attribute.attribute(String.class,
"user");
public static final Attribute<String> APPID =
Attribute.attribute(String.class, "appid");
public static final Attribute<String> CONTAINER_ID_STR = Attribute
.attribute(String.class, "container_id_str");
public static final Attribute<Path> CONTAINER_WORK_DIR = Attribute
.attribute(Path.class, "container_work_dir");
public static final Attribute<Path> NM_PRIVATE_CONTAINER_SCRIPT_PATH =
Attribute.attribute(Path.class, "nm_private_container_script_path");
public static final Attribute<Path> NM_PRIVATE_TOKENS_PATH = Attribute
.attribute(Path.class, "nm_private_tokens_path");
public static final Attribute<Path> NM_PRIVATE_KEYSTORE_PATH = Attribute
.attribute(Path.class, "nm_private_keystore_path");
public static final Attribute<Path> NM_PRIVATE_TRUSTSTORE_PATH = Attribute
.attribute(Path.class, "nm_private_truststore_path");
public static final Attribute<Path> PID_FILE_PATH = Attribute.attribute(
Path.class, "pid_file_path");
public static final Attribute<List> LOCAL_DIRS = Attribute.attribute(
List.class, "local_dirs");
public static final Attribute<List> LOG_DIRS = Attribute.attribute(
List.class, "log_dirs");
public static final Attribute<List> FILECACHE_DIRS = Attribute.attribute(
List.class, "filecache_dirs");
public static final Attribute<List> USER_LOCAL_DIRS = Attribute.attribute(
List.class, "user_local_dirs");
public static final Attribute<List> CONTAINER_LOCAL_DIRS = Attribute
.attribute(List.class, "container_local_dirs");
public static final Attribute<List> USER_FILECACHE_DIRS = Attribute
.attribute(List.class, "user_filecache_dirs");
public static final Attribute<List> APPLICATION_LOCAL_DIRS = Attribute
.attribute(List.class, "application_local_dirs");
public static final Attribute<List> CONTAINER_LOG_DIRS = Attribute.attribute(
List.class, "container_log_dirs");
public static final Attribute<String> RESOURCES_OPTIONS = Attribute.attribute(
String.class, "resources_options");
public static final Attribute<String> TC_COMMAND_FILE = Attribute.attribute(
String.class, "tc_command_file");
public static final Attribute<List> CONTAINER_RUN_CMDS = Attribute.attribute(
List.class, "container_run_cmds");
public static final Attribute<String> CGROUP_RELATIVE_PATH = Attribute
.attribute(String.class, "cgroup_relative_path");
public static final Attribute<String> PID = Attribute.attribute(
String.class, "pid");
public static final Attribute<ContainerExecutor.Signal> SIGNAL = Attribute
.attribute(ContainerExecutor.Signal.class, "signal");
public static final Attribute<String> PROCFS = Attribute.attribute(
String.class, "procfs");
} | RuntimeType |
java | resilience4j__resilience4j | resilience4j-core/src/test/java/io/github/resilience4j/core/registry/InMemoryRegistryStoreTest.java | {
"start": 848,
"end": 4182
} | class ____ {
private static final String DEFAULT_CONFIG_VALUE = "defaultConfig";
private static final String DEFAULT_CONFIG = "default";
private static final String NEW_CONFIG = "newConfig";
private static final String CUSTOM_CONFIG = "custom";
private InMemoryRegistryStore<String> inMemoryRegistryStore;
@Before
public void initialiseInMemoryRegistryStore() {
inMemoryRegistryStore = new InMemoryRegistryStore<>();
}
@Test
public void shouldComputeValueWhenKeyNotPresentInRegistryStore() {
assertEquals("Wrong Value",
DEFAULT_CONFIG_VALUE, inMemoryRegistryStore.computeIfAbsent(DEFAULT_CONFIG, k -> DEFAULT_CONFIG_VALUE));
assertThat(inMemoryRegistryStore.values()).hasSize(1);
assertEquals("Wrong Value", DEFAULT_CONFIG_VALUE, inMemoryRegistryStore.computeIfAbsent(DEFAULT_CONFIG, k -> NEW_CONFIG));
}
@Test
public void shouldPutKeyIntoRegistryStoreAndReturnOldValue() {
assertNull(inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, DEFAULT_CONFIG_VALUE));
assertThat(inMemoryRegistryStore.values()).hasSize(1);
assertEquals("Wrong Value", DEFAULT_CONFIG_VALUE, inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, NEW_CONFIG));
}
@Test(expected = NullPointerException.class)
public void shouldThrowNPEWhenValueIsNull() {
inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, null);
}
@Test(expected = NullPointerException.class)
public void shouldThrowNPEWhenKeyIsNull() {
inMemoryRegistryStore.putIfAbsent(null, DEFAULT_CONFIG_VALUE);
}
@Test
public void shouldFindConfigFromRegistryStore() {
inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, DEFAULT_CONFIG_VALUE);
assertThat(inMemoryRegistryStore.find(DEFAULT_CONFIG)).isNotEmpty();
assertThat(inMemoryRegistryStore.find(DEFAULT_CONFIG)).hasValue(DEFAULT_CONFIG_VALUE);
assertThat(inMemoryRegistryStore.find(NEW_CONFIG)).isEmpty();
}
@Test
public void shouldRemoveConfigFromRegistryStore() {
inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, DEFAULT_CONFIG_VALUE);
inMemoryRegistryStore.putIfAbsent(CUSTOM_CONFIG, NEW_CONFIG);
assertThat(inMemoryRegistryStore.remove(DEFAULT_CONFIG)).hasValue(DEFAULT_CONFIG_VALUE);
assertThat(inMemoryRegistryStore.values()).hasSize(1);
}
@Test
public void shouldReplaceKeyWithNewConfigValueWhenKeyPresent() {
inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, DEFAULT_CONFIG_VALUE);
assertThat(inMemoryRegistryStore.replace(DEFAULT_CONFIG, NEW_CONFIG)).hasValue(DEFAULT_CONFIG_VALUE);
assertThat(inMemoryRegistryStore.find(DEFAULT_CONFIG)).hasValue(NEW_CONFIG);
}
@Test
public void shouldNotReplaceKeyWithNewConfigValueWhenKeyAbsent() {
assertThat(inMemoryRegistryStore.replace(NEW_CONFIG, NEW_CONFIG)).isEmpty();
assertThat(inMemoryRegistryStore.values()).isEmpty();
}
@Test
public void shouldReturnCollectionOfConfigs() {
inMemoryRegistryStore.putIfAbsent(DEFAULT_CONFIG, DEFAULT_CONFIG_VALUE);
inMemoryRegistryStore.putIfAbsent(CUSTOM_CONFIG, NEW_CONFIG);
assertThat(inMemoryRegistryStore.values()).containsExactlyInAnyOrder(NEW_CONFIG, DEFAULT_CONFIG_VALUE);
}
}
| InMemoryRegistryStoreTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/CoreTypePool.java | {
"start": 1823,
"end": 3398
} | class ____ also want to enable
//ByteBuddy's default caching mechanism as it will cache the more
//useful output of the parsing and introspection of such types.
super( new CoreCacheProvider( acceptedPrefixes ) );
this.acceptedPrefixes = Objects.requireNonNull( acceptedPrefixes );
}
@Override
protected Resolution doDescribe(final String name) {
if ( acceptedPrefixes.isCoreClassName( name ) ) {
final Resolution resolution = resolutions.get( name );
if ( resolution != null ) {
return resolution;
}
else {
//We implement this additional layer of caching, which is on top of
//ByteBuddy's default caching, so as to prevent resolving the same
//types concurrently from the classloader.
//This is merely an efficiency improvement and will NOT provide a
//strict guarantee of symbols being resolved exactly once as there
//is no SPI within ByteBuddy which would allow this: the point is to
//make it exceptionally infrequent, which greatly helps with
//processing of large models.
return resolutions.computeIfAbsent( name, this::actualResolve );
}
}
else {
//These are not cached to not leak references to application code names
return new Resolution.Illegal( name );
}
}
private Resolution actualResolve(final String name) {
try {
final Class<?> aClass = Class.forName( name, false, hibernateClassLoader );
return new TypePool.Resolution.Simple( TypeDescription.ForLoadedType.of( aClass ) );
}
catch ( ClassNotFoundException e ) {
return new Resolution.Illegal( name );
}
}
}
| we |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/scripting/xmltags/OgnlMemberAccess.java | {
"start": 1323,
"end": 2296
} | class ____ implements MemberAccess {
private final boolean canControlMemberAccessible;
OgnlMemberAccess() {
this.canControlMemberAccessible = Reflector.canControlMemberAccessible();
}
@Override
public Object setup(OgnlContext context, Object target, Member member, String propertyName) {
Object result = null;
if (isAccessible(context, target, member, propertyName)) {
AccessibleObject accessible = (AccessibleObject) member;
if (!accessible.canAccess(target)) {
result = Boolean.FALSE;
accessible.setAccessible(true);
}
}
return result;
}
@Override
public void restore(OgnlContext context, Object target, Member member, String propertyName, Object state) {
// Flipping accessible flag is not thread safe. See #1648
}
@Override
public boolean isAccessible(OgnlContext context, Object target, Member member, String propertyName) {
return canControlMemberAccessible;
}
}
| OgnlMemberAccess |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/InterceptedDecoratedBeanMetadataProvider.java | {
"start": 476,
"end": 1074
} | class ____ implements InjectableReferenceProvider<Contextual<?>> {
@Override
public Contextual<?> get(CreationalContext<Contextual<?>> creationalContext) {
// First attempt to obtain the creational context of the interceptor bean and then the creational context of the intercepted bean
CreationalContextImpl<?> parent = unwrap(creationalContext).getParent();
if (parent != null) {
parent = parent.getParent();
return parent != null ? parent.getContextual() : null;
}
return null;
}
}
| InterceptedDecoratedBeanMetadataProvider |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/longarrays/LongArrays_assertEndsWith_Test.java | {
"start": 1723,
"end": 7145
} | class ____ extends LongArraysBaseTest {
@Override
protected void initActualArray() {
actual = arrayOf(6L, 8L, 10L, 12L);
}
@Test
void should_throw_error_if_sequence_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertEndsWith(someInfo(), actual, null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual = emptyArray();
arrays.assertEndsWith(someInfo(), actual, emptyArray());
}
@Test
void should_pass_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
arrays.assertEndsWith(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertEndsWith(someInfo(), null, arrayOf(8L)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_sequence_is_bigger_than_actual() {
AssertionInfo info = someInfo();
long[] sequence = { 6L, 8L, 10L, 12L, 20L, 22L };
Throwable error = catchThrowable(() -> arrays.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence));
}
@Test
void should_fail_if_actual_does_not_end_with_sequence() {
AssertionInfo info = someInfo();
long[] sequence = { 20L, 22L };
Throwable error = catchThrowable(() -> arrays.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence));
}
@Test
void should_fail_if_actual_ends_with_first_elements_of_sequence_only() {
AssertionInfo info = someInfo();
long[] sequence = { 6L, 20L, 22L };
Throwable error = catchThrowable(() -> arrays.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence));
}
@Test
void should_pass_if_actual_ends_with_sequence() {
arrays.assertEndsWith(someInfo(), actual, arrayOf(8L, 10L, 12L));
}
@Test
void should_pass_if_actual_and_sequence_are_equal() {
arrays.assertEndsWith(someInfo(), actual, arrayOf(6L, 8L, 10L, 12L));
}
@Test
void should_throw_error_if_sequence_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertEndsWith(someInfo(),
actual, null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_pass_if_array_of_values_to_look_for_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
arraysWithCustomComparisonStrategy.assertEndsWith(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertEndsWith(someInfo(),
null,
arrayOf(-8L)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_sequence_is_bigger_than_actual_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
long[] sequence = { 6L, -8L, 10L, 12L, 20L, 22L };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence, absValueComparisonStrategy));
}
@Test
void should_fail_if_actual_does_not_end_with_sequence_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
long[] sequence = { 20L, 22L };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence, absValueComparisonStrategy));
}
@Test
void should_fail_if_actual_ends_with_first_elements_of_sequence_only_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
long[] sequence = { 6L, 20L, 22L };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence, absValueComparisonStrategy));
}
@Test
void should_pass_if_actual_ends_with_sequence_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertEndsWith(someInfo(), actual, arrayOf(-8L, 10L, 12L));
}
@Test
void should_pass_if_actual_and_sequence_are_equal_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertEndsWith(someInfo(), actual, arrayOf(6L, -8L, 10L, 12L));
}
}
| LongArrays_assertEndsWith_Test |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/docker/DockerApiTests.java | {
"start": 36579,
"end": 37987
} | class ____ {
private SystemApi api;
@BeforeEach
void setup() {
this.api = DockerApiTests.this.dockerApi.system();
}
@Test
void getApiVersionWithVersionHeaderReturnsVersion() throws Exception {
given(http().head(eq(new URI(PING_URL))))
.willReturn(responseWithHeaders(new BasicHeader(DockerApi.API_VERSION_HEADER_NAME, "1.44")));
assertThat(this.api.getApiVersion()).isEqualTo(ApiVersion.of(1, 44));
}
@Test
void getApiVersionWithEmptyVersionHeaderReturnsUnknownVersion() throws Exception {
given(http().head(eq(new URI(PING_URL))))
.willReturn(responseWithHeaders(new BasicHeader(DockerApi.API_VERSION_HEADER_NAME, "")));
assertThat(this.api.getApiVersion()).isEqualTo(DockerApi.UNKNOWN_API_VERSION);
}
@Test
void getApiVersionWithNoVersionHeaderReturnsUnknownVersion() throws Exception {
given(http().head(eq(new URI(PING_URL)))).willReturn(emptyResponse());
assertThat(this.api.getApiVersion()).isEqualTo(DockerApi.UNKNOWN_API_VERSION);
}
@Test
void getApiVersionWithExceptionReturnsUnknownVersion(CapturedOutput output) throws Exception {
given(http().head(eq(new URI(PING_URL)))).willThrow(new IOException("simulated error"));
assertThat(this.api.getApiVersion()).isEqualTo(DockerApi.UNKNOWN_API_VERSION);
assertThat(output).contains("Warning: Failed to determine Docker API version: simulated error");
}
}
}
| SystemDockerApiTests |
java | spring-projects__spring-security | oauth2/oauth2-core/src/test/java/org/springframework/security/oauth2/core/user/OAuth2UserAuthorityTests.java | {
"start": 1055,
"end": 3450
} | class ____ {
private static final String AUTHORITY = "ROLE_USER";
private static final Map<String, Object> ATTRIBUTES = Collections.singletonMap("username", "test");
private static final OAuth2UserAuthority AUTHORITY_WITH_OBJECTURL;
private static final OAuth2UserAuthority AUTHORITY_WITH_STRINGURL;
static {
try {
AUTHORITY_WITH_OBJECTURL = new OAuth2UserAuthority(
Collections.singletonMap("someurl", new URL("https://localhost")));
AUTHORITY_WITH_STRINGURL = new OAuth2UserAuthority(
Collections.singletonMap("someurl", "https://localhost"));
}
catch (MalformedURLException ex) {
throw new RuntimeException(ex);
}
}
@Test
public void constructorWhenAuthorityIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> new OAuth2UserAuthority(null, ATTRIBUTES));
}
@Test
public void constructorWhenAttributesIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> new OAuth2UserAuthority(AUTHORITY, null));
}
@Test
public void constructorWhenAttributesIsEmptyThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2UserAuthority(AUTHORITY, Collections.emptyMap()));
}
@Test
public void constructorWhenAllParametersProvidedAndValidThenCreated() {
OAuth2UserAuthority userAuthority = new OAuth2UserAuthority(AUTHORITY, ATTRIBUTES);
assertThat(userAuthority.getAuthority()).isEqualTo(AUTHORITY);
assertThat(userAuthority.getAttributes()).isEqualTo(ATTRIBUTES);
}
@Test
public void equalsRegardlessOfUrlType() {
assertThat(AUTHORITY_WITH_OBJECTURL).isEqualTo(AUTHORITY_WITH_OBJECTURL);
assertThat(AUTHORITY_WITH_STRINGURL).isEqualTo(AUTHORITY_WITH_STRINGURL);
assertThat(AUTHORITY_WITH_OBJECTURL).isEqualTo(AUTHORITY_WITH_STRINGURL);
assertThat(AUTHORITY_WITH_STRINGURL).isEqualTo(AUTHORITY_WITH_OBJECTURL);
}
@Test
public void hashCodeIsSameRegardlessOfUrlType() {
assertThat(AUTHORITY_WITH_OBJECTURL.hashCode()).isEqualTo(AUTHORITY_WITH_OBJECTURL.hashCode());
assertThat(AUTHORITY_WITH_STRINGURL.hashCode()).isEqualTo(AUTHORITY_WITH_STRINGURL.hashCode());
assertThat(AUTHORITY_WITH_OBJECTURL.hashCode()).isEqualTo(AUTHORITY_WITH_STRINGURL.hashCode());
assertThat(AUTHORITY_WITH_STRINGURL.hashCode()).isEqualTo(AUTHORITY_WITH_OBJECTURL.hashCode());
}
}
| OAuth2UserAuthorityTests |
java | alibaba__fastjson | src/test/java/com/alibaba/fastjson/support/jaxrs/TestIssue885.java | {
"start": 416,
"end": 493
} | class ____ extends JerseyTest {
@Path("user")
public static | TestIssue885 |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/errors/RetryWithToleranceOperator.java | {
"start": 2876,
"end": 4995
} | class ____<T> implements AutoCloseable {
private static final Logger log = LoggerFactory.getLogger(RetryWithToleranceOperator.class);
public static final long RETRIES_DELAY_MIN_MS = 300;
private static final Map<Stage, Class<? extends Exception>> TOLERABLE_EXCEPTIONS = new HashMap<>();
static {
TOLERABLE_EXCEPTIONS.put(Stage.TRANSFORMATION, Exception.class);
TOLERABLE_EXCEPTIONS.put(Stage.HEADER_CONVERTER, Exception.class);
TOLERABLE_EXCEPTIONS.put(Stage.KEY_CONVERTER, Exception.class);
TOLERABLE_EXCEPTIONS.put(Stage.VALUE_CONVERTER, Exception.class);
}
private final long errorRetryTimeout;
private final long errorMaxDelayInMillis;
private final ToleranceType errorToleranceType;
private long totalFailures = 0;
private final Time time;
private final ErrorHandlingMetrics errorHandlingMetrics;
private final CountDownLatch stopRequestedLatch;
private volatile boolean stopping; // indicates whether the operator has been asked to stop retrying
private List<ErrorReporter<T>> reporters;
public RetryWithToleranceOperator(long errorRetryTimeout, long errorMaxDelayInMillis,
ToleranceType toleranceType, Time time, ErrorHandlingMetrics errorHandlingMetrics) {
this(errorRetryTimeout, errorMaxDelayInMillis, toleranceType, time, errorHandlingMetrics, new CountDownLatch(1));
}
RetryWithToleranceOperator(long errorRetryTimeout, long errorMaxDelayInMillis,
ToleranceType toleranceType, Time time, ErrorHandlingMetrics errorHandlingMetrics,
CountDownLatch stopRequestedLatch) {
this.errorRetryTimeout = errorRetryTimeout;
this.errorMaxDelayInMillis = errorMaxDelayInMillis;
this.errorToleranceType = toleranceType;
this.time = time;
this.errorHandlingMetrics = errorHandlingMetrics;
this.stopRequestedLatch = stopRequestedLatch;
this.stopping = false;
this.reporters = List.of();
}
/**
* Inform this | RetryWithToleranceOperator |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/spr8761/Spr8761Tests.java | {
"start": 1556,
"end": 1637
} | class ____ {
@Retention(RetentionPolicy.RUNTIME)
@Component
@ | WithNestedAnnotation |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringSetHeadersTest.java | {
"start": 1044,
"end": 1295
} | class ____ extends SetHeadersProcessorTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/setHeaders.xml");
}
}
| SpringSetHeadersTest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/JobGraphGeneratorTestBase.java | {
"start": 109283,
"end": 109743
} | class ____<T>
implements OutputFormat<T> {
@Override
public void configure(Configuration parameters) {}
@Override
public void open(InitializationContext context) throws IOException {}
@Override
public void writeRecord(T record) throws IOException {}
@Override
public void close() throws IOException {}
}
private static | TestingOutputFormatNotSupportConcurrentExecutionAttempts |
java | apache__avro | lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java | {
"start": 4401,
"end": 12169
} | interface ____ creating a new CallFuture<T>, passing it
* in as the Callback parameter, and then waiting on that Future.
*
* @param <T> the return type of the message.
* @param messageName the name of the message to invoke.
* @param request the request data to send.
* @param callback the callback which will be invoked when the response is
* returned or an error occurs.
* @throws AvroRemoteException if an exception is thrown to client by server.
* @throws IOException if an I/O error occurs while sending the
* message.
* @throws AvroRuntimeException for another undeclared error while sending the
* message.
*/
public <T> void request(String messageName, Object request, Callback<T> callback)
throws AvroRemoteException, IOException {
request(new Request(messageName, request, new RPCContext()), callback);
}
/** Writes a request message and returns the result through a Callback. */
<T> void request(Request request, Callback<T> callback) throws AvroRemoteException, IOException {
Transceiver t = getTransceiver();
if (!t.isConnected()) {
// Acquire handshake lock so that only one thread is performing the
// handshake and other threads block until the handshake is completed
handshakeLock.lock();
try {
if (t.isConnected()) {
// Another thread already completed the handshake; no need to hold
// the write lock
handshakeLock.unlock();
} else {
CallFuture<T> callFuture = new CallFuture<>(callback);
t.transceive(request.getBytes(), new TransceiverCallback<>(request, callFuture));
try {
// Block until handshake complete
callFuture.await();
} catch (InterruptedException e) {
// Restore the interrupted status
Thread.currentThread().interrupt();
}
if (request.getMessage().isOneWay()) {
Throwable error = callFuture.getError();
if (error != null) {
if (error instanceof AvroRemoteException) {
throw (AvroRemoteException) error;
} else if (error instanceof AvroRuntimeException) {
throw (AvroRuntimeException) error;
} else if (error instanceof IOException) {
throw (IOException) error;
} else {
throw new AvroRuntimeException(error);
}
}
}
return;
}
} finally {
if (handshakeLock.isHeldByCurrentThread()) {
handshakeLock.unlock();
}
}
}
if (request.getMessage().isOneWay()) {
t.lockChannel();
try {
t.writeBuffers(request.getBytes());
if (callback != null) {
callback.handleResult(null);
}
} finally {
t.unlockChannel();
}
} else {
t.transceive(request.getBytes(), new TransceiverCallback<>(request, callback));
}
}
private static final ConcurrentMap<String, MD5> REMOTE_HASHES = new ConcurrentHashMap<>();
private static final ConcurrentMap<MD5, Protocol> REMOTE_PROTOCOLS = new ConcurrentHashMap<>();
private static final SpecificDatumWriter<HandshakeRequest> HANDSHAKE_WRITER = new SpecificDatumWriter<>(
HandshakeRequest.class);
private static final SpecificDatumReader<HandshakeResponse> HANDSHAKE_READER = new SpecificDatumReader<>(
HandshakeResponse.class);
private void writeHandshake(Encoder out) throws IOException {
if (getTransceiver().isConnected())
return;
MD5 localHash = new MD5();
localHash.bytes(local.getMD5());
String remoteName = transceiver.getRemoteName();
MD5 remoteHash = REMOTE_HASHES.get(remoteName);
if (remoteHash == null) { // guess remote is local
remoteHash = localHash;
remote = local;
} else {
remote = REMOTE_PROTOCOLS.get(remoteHash);
}
HandshakeRequest handshake = new HandshakeRequest();
handshake.setClientHash(localHash);
handshake.setServerHash(remoteHash);
if (sendLocalText)
handshake.setClientProtocol(local.toString());
RPCContext context = new RPCContext();
context.setHandshakeRequest(handshake);
for (RPCPlugin plugin : rpcMetaPlugins) {
plugin.clientStartConnect(context);
}
handshake.setMeta(context.requestHandshakeMeta());
HANDSHAKE_WRITER.write(handshake, out);
}
private boolean readHandshake(Decoder in) throws IOException {
if (getTransceiver().isConnected())
return true;
boolean established = false;
HandshakeResponse handshake = HANDSHAKE_READER.read(null, in);
switch (handshake.getMatch()) {
case BOTH:
established = true;
sendLocalText = false;
break;
case CLIENT:
LOG.debug("Handshake match = CLIENT");
setRemote(handshake);
established = true;
sendLocalText = false;
break;
case NONE:
LOG.debug("Handshake match = NONE");
setRemote(handshake);
sendLocalText = true;
break;
default:
throw new AvroRuntimeException("Unexpected match: " + handshake.getMatch());
}
RPCContext context = new RPCContext();
context.setHandshakeResponse(handshake);
for (RPCPlugin plugin : rpcMetaPlugins) {
plugin.clientFinishConnect(context);
}
if (established)
getTransceiver().setRemote(remote);
return established;
}
private void setRemote(HandshakeResponse handshake) throws IOException {
remote = Protocol.parse(handshake.getServerProtocol().toString());
MD5 remoteHash = handshake.getServerHash();
REMOTE_HASHES.put(transceiver.getRemoteName(), remoteHash);
REMOTE_PROTOCOLS.putIfAbsent(remoteHash, remote);
}
/** Return the remote protocol. Force a handshake if required. */
public Protocol getRemote() throws IOException {
if (remote != null)
return remote; // already have it
MD5 remoteHash = REMOTE_HASHES.get(transceiver.getRemoteName());
if (remoteHash != null) {
remote = REMOTE_PROTOCOLS.get(remoteHash);
if (remote != null)
return remote; // already cached
}
handshakeLock.lock();
try {
// force handshake
ByteBufferOutputStream bbo = new ByteBufferOutputStream();
// direct because the payload is tiny.
Encoder out = ENCODER_FACTORY.directBinaryEncoder(bbo, null);
writeHandshake(out);
out.writeInt(0); // empty metadata
out.writeString(""); // bogus message name
List<ByteBuffer> response = getTransceiver().transceive(bbo.getBufferList());
ByteBufferInputStream bbi = new ByteBufferInputStream(response);
BinaryDecoder in = DecoderFactory.get().binaryDecoder(bbi, null);
readHandshake(in);
return this.remote;
} finally {
handshakeLock.unlock();
}
}
/** Writes a request message. */
public abstract void writeRequest(Schema schema, Object request, Encoder out) throws IOException;
@Deprecated // for compatibility in 1.5
public Object readResponse(Schema schema, Decoder in) throws IOException {
return readResponse(schema, schema, in);
}
/** Reads a response message. */
public abstract Object readResponse(Schema writer, Schema reader, Decoder in) throws IOException;
@Deprecated // for compatibility in 1.5
public Object readError(Schema schema, Decoder in) throws IOException {
return readError(schema, schema, in);
}
/** Reads an error message. */
public abstract Exception readError(Schema writer, Schema reader, Decoder in) throws IOException;
/**
* Handles callbacks from transceiver invocations.
*/
protected | by |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableTimeInterval.java | {
"start": 1431,
"end": 2857
} | class ____<T> implements Observer<T>, Disposable {
final Observer<? super Timed<T>> downstream;
final TimeUnit unit;
final Scheduler scheduler;
long lastTime;
Disposable upstream;
TimeIntervalObserver(Observer<? super Timed<T>> actual, TimeUnit unit, Scheduler scheduler) {
this.downstream = actual;
this.scheduler = scheduler;
this.unit = unit;
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
lastTime = scheduler.now(unit);
downstream.onSubscribe(this);
}
}
@Override
public void dispose() {
upstream.dispose();
}
@Override
public boolean isDisposed() {
return upstream.isDisposed();
}
@Override
public void onNext(T t) {
long now = scheduler.now(unit);
long last = lastTime;
lastTime = now;
long delta = now - last;
downstream.onNext(new Timed<>(t, delta, unit));
}
@Override
public void onError(Throwable t) {
downstream.onError(t);
}
@Override
public void onComplete() {
downstream.onComplete();
}
}
}
| TimeIntervalObserver |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.