language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/InterceptStrategy.java | {
"start": 970,
"end": 1756
} | interface ____ to allow an implementation to wrap processors in a route with interceptors. For
* example, a possible usecase is to gather performance statistics at the processor's level.
* <p/>
* Its <b>strongly</b> adviced to use an {@link org.apache.camel.AsyncProcessor} as the returned wrapped
* {@link Processor} which ensures the interceptor works well with the asynchronous routing engine. You can use the
* {@link org.apache.camel.support.processor.DelegateAsyncProcessor} to easily return an
* {@link org.apache.camel.AsyncProcessor} and override the
* {@link org.apache.camel.AsyncProcessor#process(org.apache.camel.Exchange, org.apache.camel.AsyncCallback)} to
* implement your interceptor logic. And just invoke the super method to <b>continue</b> routing.
*/
public | is |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/HttpSecurityRecorder.java | {
"start": 2530,
"end": 9414
} | class ____ {
private static final Logger log = Logger.getLogger(HttpSecurityRecorder.class);
private final RuntimeValue<VertxHttpConfig> httpConfig;
private final VertxHttpBuildTimeConfig httpBuildTimeConfig;
public HttpSecurityRecorder(final RuntimeValue<VertxHttpConfig> httpConfig, VertxHttpBuildTimeConfig httpBuildTimeConfig) {
this.httpConfig = httpConfig;
this.httpBuildTimeConfig = httpBuildTimeConfig;
}
public RuntimeValue<AuthenticationHandler> authenticationMechanismHandler(boolean proactiveAuthentication,
boolean propagateRoutingContext) {
return new RuntimeValue<>(new AuthenticationHandler(proactiveAuthentication, propagateRoutingContext));
}
public Handler<RoutingContext> getHttpAuthenticatorHandler(RuntimeValue<AuthenticationHandler> handlerRuntimeValue) {
return handlerRuntimeValue.getValue();
}
public void initializeHttpAuthenticatorHandler(RuntimeValue<AuthenticationHandler> handlerRuntimeValue,
BeanContainer beanContainer) {
handlerRuntimeValue.getValue().init(beanContainer.beanInstance(PathMatchingHttpSecurityPolicy.class),
HttpSecurityConfiguration.get().rolesMapping());
}
public Handler<RoutingContext> permissionCheckHandler() {
return new Handler<RoutingContext>() {
volatile HttpAuthorizer authorizer;
@Override
public void handle(RoutingContext event) {
if (authorizer == null) {
authorizer = CDI.current().select(HttpAuthorizer.class).get();
}
authorizer.checkPermission(event);
}
};
}
/**
* This handler resolves the identity, and will be mapped to the post location. Otherwise,
* for lazy auth the post will not be evaluated if there is no security rule for the post location.
*/
public void formAuthPostHandler(RuntimeValue<Router> httpRouter) {
HttpSecurityConfiguration config = HttpSecurityConfiguration.get();
if (config.formAuthEnabled()) {
httpRouter.getValue()
.post(config.formPostLocation())
.order(-1 * SecurityHandlerPriorities.FORM_AUTHENTICATION)
.handler(new Handler<RoutingContext>() {
@Override
public void handle(RoutingContext event) {
Uni<SecurityIdentity> user = event.get(QuarkusHttpUser.DEFERRED_IDENTITY_KEY);
user.subscribe().withSubscriber(new UniSubscriber<SecurityIdentity>() {
@Override
public void onSubscribe(UniSubscription uniSubscription) {
}
@Override
public void onItem(SecurityIdentity securityIdentity) {
// we expect that form-based authentication mechanism to recognize the post-location,
// authenticate and if user provided credentials in form attribute, response will be ended
if (!event.response().ended()) {
event.response().end();
}
}
@Override
public void onFailure(Throwable throwable) {
// with current builtin implementation if only form-based authentication mechanism the event here
// won't be ended or failed, but we check in case there is custom implementation that differs
if (!event.response().ended() && !event.failed()) {
event.fail(throwable);
}
}
});
}
});
}
}
public Supplier<EagerSecurityInterceptorStorage> createSecurityInterceptorStorage(
Map<RuntimeValue<MethodDescription>, Consumer<RoutingContext>> endpointRuntimeValToInterceptor,
Map<String, Consumer<RoutingContext>> classNameToInterceptor) {
final Map<MethodDescription, Consumer<RoutingContext>> endpointToInterceptor = new HashMap<>();
for (var entry : endpointRuntimeValToInterceptor.entrySet()) {
endpointToInterceptor.put(entry.getKey().getValue(), entry.getValue());
}
return new Supplier<EagerSecurityInterceptorStorage>() {
@Override
public EagerSecurityInterceptorStorage get() {
return new EagerSecurityInterceptorStorage(endpointToInterceptor, classNameToInterceptor);
}
};
}
public Supplier<Map<String, Object>> createAdditionalSecEventPropsSupplier() {
return new Supplier<Map<String, Object>>() {
@Override
public Map<String, Object> get() {
if (Arc.container().requestContext().isActive()) {
// if present, add RoutingContext from CDI request to the SecurityEvents produced in Security extension
// it's done this way as Security extension is not Vert.x based, but users find RoutingContext useful
var event = Arc.container().instance(CurrentVertxRequest.class).get().getCurrent();
if (event != null) {
if (event.user() instanceof QuarkusHttpUser user) {
return Map.of(RoutingContext.class.getName(), event, SecurityIdentity.class.getName(),
user.getSecurityIdentity());
}
return Map.of(RoutingContext.class.getName(), event);
}
}
return Map.of();
}
};
}
public RuntimeValue<CORSConfig> prepareHttpSecurityConfiguration(ShutdownContext shutdownContext) {
// this is done so that we prepare and validate HTTP Security config before the first incoming request
var config = HttpSecurityConfiguration.get(httpConfig.getValue(), httpBuildTimeConfig);
shutdownContext.addShutdownTask(HttpSecurityConfiguration::clear);
return new RuntimeValue<>(config.getCorsConfig());
}
public Supplier<FormAuthenticationMechanism> createFormAuthMechanism() {
return new Supplier<FormAuthenticationMechanism>() {
@Override
public FormAuthenticationMechanism get() {
return HttpSecurityConfiguration.get().getFormAuthenticationMechanism();
}
};
}
public static abstract | HttpSecurityRecorder |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/datageneration/matchers/source/FieldSpecificMatcher.java | {
"start": 20194,
"end": 20724
} | class ____ extends NumberMatcher {
FloatMatcher(
XContentBuilder actualMappings,
Settings.Builder actualSettings,
XContentBuilder expectedMappings,
Settings.Builder expectedSettings
) {
super(FieldType.FLOAT, actualMappings, actualSettings, expectedMappings, expectedSettings);
}
@Override
protected Object cast(Object value) {
return value instanceof Integer v ? v.floatValue() : value;
}
}
| FloatMatcher |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/AbstractOsBasedExecutionCondition.java | {
"start": 758,
"end": 851
} | class ____ OS-based {@link ExecutionCondition} implementations.
*
* @since 5.9
*/
abstract | for |
java | quarkusio__quarkus | integration-tests/opentelemetry-reactive/src/test/java/io/quarkus/it/opentelemetry/reactive/OpenTelemetryWithSpanAtStartupTest.java | {
"start": 3292,
"end": 3383
} | interface ____ {
@GET
void call();
}
public static | WireMockRestClient |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/types/Types.java | {
"start": 651,
"end": 689
} | class ____ {
/**
* Given a | Types |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/NoTypeInfoTest.java | {
"start": 618,
"end": 670
} | interface ____ {
}
final static | NoTypeInterface |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/StronglyTypeTimeTest.java | {
"start": 6043,
"end": 6440
} | class ____ {
private final Duration FOO = Duration.ofMillis(100);
public Duration get() {
return FOO;
}
}
""")
.doTest();
}
@Test
public void fieldRenaming() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.time.Duration;
| Test |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-common/src/main/java/org/apache/camel/dsl/yaml/common/exception/InvalidRouteException.java | {
"start": 1004,
"end": 1209
} | class ____ extends MarkedYamlEngineException {
public InvalidRouteException(Node node, String message) {
super(null, Optional.empty(), message, node.getStartMark());
}
}
| InvalidRouteException |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/UuidUtil.java | {
"start": 1387,
"end": 2766
} | class ____ {
private static final long[] EMPTY_LONG_ARRAY = {};
/**
* System property that may be used to seed the UUID generation with an integer value.
*/
public static final String UUID_SEQUENCE = "org.apache.logging.log4j.uuidSequence";
private static final Logger LOGGER = StatusLogger.getLogger();
private static final String ASSIGNED_SEQUENCES = "org.apache.logging.log4j.assignedSequences";
private static final AtomicInteger COUNT = new AtomicInteger(0);
private static final long TYPE1 = 0x1000L;
private static final byte VARIANT = (byte) 0x80;
private static final int SEQUENCE_MASK = 0x3FFF;
private static final long NUM_100NS_INTERVALS_SINCE_UUID_EPOCH = 0x01b21dd213814000L;
private static final long INITIAL_UUID_SEQNO =
PropertiesUtil.getProperties().getLongProperty(UUID_SEQUENCE, 0);
private static final long LOW_MASK = 0xffffffffL;
private static final long MID_MASK = 0xffff00000000L;
private static final long HIGH_MASK = 0xfff000000000000L;
private static final int NODE_SIZE = 8;
private static final int SHIFT_2 = 16;
private static final int SHIFT_4 = 32;
private static final int SHIFT_6 = 48;
private static final int HUNDRED_NANOS_PER_MILLI = 10000;
private static final long LEAST = initialize(NetUtils.getMacAddress());
/* This | UuidUtil |
java | apache__kafka | trogdor/src/main/java/org/apache/kafka/trogdor/coordinator/NodeManager.java | {
"start": 3724,
"end": 6749
} | class ____ {
private final long workerId;
private final String taskId;
private final TaskSpec spec;
private boolean shouldRun;
private WorkerState state;
ManagedWorker(long workerId, String taskId, TaskSpec spec,
boolean shouldRun, WorkerState state) {
this.workerId = workerId;
this.taskId = taskId;
this.spec = spec;
this.shouldRun = shouldRun;
this.state = state;
}
void tryCreate() {
try {
client.createWorker(new CreateWorkerRequest(workerId, taskId, spec));
} catch (Throwable e) {
log.error("{}: error creating worker {}.", node.name(), this, e);
}
}
void tryStop() {
try {
client.stopWorker(new StopWorkerRequest(workerId));
} catch (Throwable e) {
log.error("{}: error stopping worker {}.", node.name(), this, e);
}
}
@Override
public String toString() {
return String.format("%s_%d", taskId, workerId);
}
}
/**
* The node which we are managing.
*/
private final Node node;
/**
* The task manager.
*/
private final TaskManager taskManager;
/**
* A client for the Node's Agent.
*/
private final AgentClient client;
/**
* Maps task IDs to worker structures.
*/
private final Map<Long, ManagedWorker> workers;
/**
* An executor service which manages the thread dedicated to this node.
*/
private final ScheduledExecutorService executor;
/**
* The heartbeat runnable.
*/
private final NodeHeartbeat heartbeat;
/**
* A future which can be used to cancel the periodic hearbeat task.
*/
private ScheduledFuture<?> heartbeatFuture;
NodeManager(Node node, TaskManager taskManager) {
this.node = node;
this.taskManager = taskManager;
this.client = new AgentClient.Builder().
maxTries(1).
target(node.hostname(), Node.Util.getTrogdorAgentPort(node)).
build();
this.workers = new HashMap<>();
this.executor = Executors.newSingleThreadScheduledExecutor(
ThreadUtils.createThreadFactory("NodeManager(" + node.name() + ")",
false));
this.heartbeat = new NodeHeartbeat();
rescheduleNextHeartbeat(HEARTBEAT_DELAY_MS);
}
/**
* Reschedule the heartbeat runnable.
*
* @param initialDelayMs The initial delay to use.
*/
void rescheduleNextHeartbeat(long initialDelayMs) {
if (this.heartbeatFuture != null) {
this.heartbeatFuture.cancel(false);
}
this.heartbeatFuture = this.executor.scheduleAtFixedRate(heartbeat,
initialDelayMs, HEARTBEAT_DELAY_MS, TimeUnit.MILLISECONDS);
}
/**
* The heartbeat runnable.
*/
| ManagedWorker |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/runtime/operators/windowing/WindowOperatorContractTest.java | {
"start": 149033,
"end": 152605
} | class ____ implements TimeDomainAdaptor {
@Override
public void setIsEventTime(WindowAssigner<?, ?> mockAssigner) {
when(mockAssigner.isEventTime()).thenReturn(true);
}
public void advanceTime(OneInputStreamOperatorTestHarness testHarness, long timestamp)
throws Exception {
testHarness.processWatermark(new Watermark(timestamp));
}
@Override
public void registerTimer(Trigger.TriggerContext ctx, long timestamp) {
ctx.registerEventTimeTimer(timestamp);
}
@Override
public void deleteTimer(Trigger.TriggerContext ctx, long timestamp) {
ctx.deleteEventTimeTimer(timestamp);
}
@Override
public int numTimers(AbstractStreamOperatorTestHarness testHarness) {
return testHarness.numEventTimeTimers();
}
@Override
public int numTimersOtherDomain(AbstractStreamOperatorTestHarness testHarness) {
return testHarness.numProcessingTimeTimers();
}
@Override
public void shouldRegisterTimerOnElement(Trigger<?, TimeWindow> mockTrigger, long timestamp)
throws Exception {
shouldRegisterEventTimeTimerOnElement(mockTrigger, timestamp);
}
@Override
public void shouldDeleteTimerOnElement(Trigger<?, TimeWindow> mockTrigger, long timestamp)
throws Exception {
shouldDeleteEventTimeTimerOnElement(mockTrigger, timestamp);
}
@Override
public void shouldContinueOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception {
shouldContinueOnEventTime(mockTrigger);
}
@Override
public void shouldFireOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception {
shouldFireOnEventTime(mockTrigger);
}
@Override
public void shouldFireAndPurgeOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception {
shouldFireAndPurgeOnEventTime(mockTrigger);
}
@Override
public void shouldPurgeOnTime(Trigger<?, TimeWindow> mockTrigger) throws Exception {
shouldPurgeOnEventTime(mockTrigger);
}
@Override
public void verifyTriggerCallback(
Trigger<?, TimeWindow> mockTrigger,
VerificationMode verificationMode,
Long time,
TimeWindow window)
throws Exception {
if (time == null && window == null) {
verify(mockTrigger, verificationMode)
.onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext());
} else if (time == null) {
verify(mockTrigger, verificationMode)
.onEventTime(anyLong(), eq(window), anyTriggerContext());
} else if (window == null) {
verify(mockTrigger, verificationMode)
.onEventTime(eq(time), anyTimeWindow(), anyTriggerContext());
} else {
verify(mockTrigger, verificationMode)
.onEventTime(eq(time), eq(window), anyTriggerContext());
}
}
@Override
public void verifyCorrectTime(
OneInputStreamOperatorTestHarness testHarness,
InternalWindowFunction.InternalWindowContext context) {
assertThat(context.currentWatermark()).isEqualTo(testHarness.getCurrentWatermark());
}
}
private static | EventTimeAdaptor |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/targetclass/mixed/PreDestroyOnTargetClassAndOutsideAndManySuperclassesWithOverridesTest.java | {
"start": 1297,
"end": 1506
} | class ____ {
@PreDestroy
void intercept() throws Exception {
MyBean.invocations.add("this should not be called as the method is overridden in MyBean");
}
}
static | Alpha |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorJob.java | {
"start": 2973,
"end": 3283
} | interface ____ { public ArrayList<Entry>
* generateKeyValPairs(Object key, Object value); public void
* configure(Configuration conf); }
*
* The package also provides a base class, ValueAggregatorBaseDescriptor,
* implementing the above interface. The user can extend the base | ValueAggregatorDescriptor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryAssignmentTest.java | {
"start": 8446,
"end": 8917
} | class ____ {
@Inject(optional = true)
boolean myFoo;
void sin() {
myFoo = false;
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void optionalInject_andMock_flagged() {
testHelper
.addSourceLines(
"Test.java",
"""
import com.google.inject.Inject;
import org.mockito.Mock;
| Test |
java | elastic__elasticsearch | qa/vector/src/main/java/org/elasticsearch/test/knn/KnnIndexTester.java | {
"start": 2951,
"end": 3086
} | class ____ create and test KNN indices using Lucene.
* It supports various index types (HNSW, FLAT, IVF) and configurations.
*/
public | to |
java | apache__camel | components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvMarshalTest.java | {
"start": 1224,
"end": 1268
} | class ____ standard marshalling
*/
public | tests |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-26/src/main/java/org/redisson/spring/data/connection/RedissonReactiveKeyCommands.java | {
"start": 1787,
"end": 14606
} | class ____ extends RedissonBaseReactive implements ReactiveKeyCommands {
public RedissonReactiveKeyCommands(CommandReactiveExecutor executorService) {
super(executorService);
}
@Override
public Flux<BooleanResponse<KeyCommand>> exists(Publisher<KeyCommand> keys) {
return execute(keys, key -> {
Assert.notNull(key.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(key.getKey());
Mono<Boolean> m = read(keyBuf, StringCodec.INSTANCE, RedisCommands.EXISTS, keyBuf);
return m.map(v -> new BooleanResponse<>(key, v));
});
}
private static final RedisStrictCommand<DataType> TYPE = new RedisStrictCommand<DataType>("TYPE", new Convertor<DataType>() {
@Override
public DataType convert(Object obj) {
return DataType.fromCode(obj.toString());
}
});
@Override
public Flux<CommandResponse<KeyCommand, DataType>> type(Publisher<KeyCommand> keys) {
return execute(keys, key -> {
Assert.notNull(key.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(key.getKey());
Mono<DataType> m = read(keyBuf, StringCodec.INSTANCE, TYPE, keyBuf);
return m.map(v -> new CommandResponse<>(key, v));
});
}
@Override
public Flux<NumericResponse<Collection<ByteBuffer>, Long>> touch(Publisher<Collection<ByteBuffer>> keys) {
return execute(keys, coll -> {
Assert.notNull(coll, "Collection must not be null!");
Object[] params = coll.stream().map(buf -> toByteArray(buf)).toArray(Object[]::new);
Mono<Long> m = write(null, StringCodec.INSTANCE, RedisCommands.TOUCH_LONG, params);
return m.map(v -> new NumericResponse<>(coll, v));
});
}
@Override
public Flux<MultiValueResponse<ByteBuffer, ByteBuffer>> keys(Publisher<ByteBuffer> patterns) {
return execute(patterns, pattern -> {
Assert.notNull(pattern, "Pattern must not be null!");
Mono<List<String>> m = read(null, StringCodec.INSTANCE, RedisCommands.KEYS, toByteArray(pattern));
return m.map(v -> {
List<ByteBuffer> values = v.stream().map(t -> ByteBuffer.wrap(t.getBytes())).collect(Collectors.toList());
return new MultiValueResponse<>(pattern, values);
});
});
}
@Override
public Flux<ByteBuffer> scan(ScanOptions options) {
RedissonKeysReactive reactive = new RedissonKeysReactive(executorService);
return reactive.getKeysByPattern(options.getPattern(), options.getCount().intValue()).map(t -> ByteBuffer.wrap(t.getBytes()));
}
@Override
public Mono<ByteBuffer> randomKey() {
return executorService.reactive(() -> {
return executorService.readRandomAsync(ByteArrayCodec.INSTANCE, RedisCommands.RANDOM_KEY);
});
}
static final RedisStrictCommand<String> RENAME = new RedisStrictCommand<String>("RENAME");
@Override
public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getNewName(), "New name must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
byte[] newKeyBuf = toByteArray(command.getNewName());
Mono<String> m = write(keyBuf, StringCodec.INSTANCE, RENAME, keyBuf, newKeyBuf);
return m.map(v -> new BooleanResponse<>(command, true));
});
}
@Override
public Flux<BooleanResponse<RenameCommand>> renameNX(Publisher<RenameCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getNewName(), "New name must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
byte[] newKeyBuf = toByteArray(command.getNewName());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.RENAMENX, keyBuf, newKeyBuf);
return m.map(v -> new BooleanResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<KeyCommand, Long>> del(Publisher<KeyCommand> keys) {
Flux<KeyCommand> s = Flux.from(keys);
return s.concatMap(command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Long> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.DEL, keyBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<List<ByteBuffer>, Long>> mDel(Publisher<List<ByteBuffer>> keys) {
return execute(keys, coll -> {
Assert.notNull(coll, "List must not be null!");
Object[] params = coll.stream().map(buf -> toByteArray(buf)).toArray(Object[]::new);
Mono<Long> m = write(null, StringCodec.INSTANCE, RedisCommands.DEL, params);
return m.map(v -> new NumericResponse<>(coll, v));
});
}
@Override
public Flux<NumericResponse<KeyCommand, Long>> unlink(Publisher<KeyCommand> keys) {
return execute(keys, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Long> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.UNLINK, keyBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<List<ByteBuffer>, Long>> mUnlink(Publisher<List<ByteBuffer>> keys) {
return execute(keys, coll -> {
Assert.notNull(coll, "List must not be null!");
Object[] params = coll.stream().map(buf -> toByteArray(buf)).toArray(Object[]::new);
Mono<Long> m = write(null, StringCodec.INSTANCE, RedisCommands.UNLINK, params);
return m.map(v -> new NumericResponse<>(coll, v));
});
}
private static final RedisStrictCommand<Boolean> EXPIRE = new RedisStrictCommand<Boolean>("EXPIRE", new BooleanReplayConvertor());
@Override
public Flux<BooleanResponse<ExpireCommand>> expire(Publisher<ExpireCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, EXPIRE, keyBuf, command.getTimeout().getSeconds());
return m.map(v -> new BooleanResponse<>(command, v));
});
}
@Override
public Flux<BooleanResponse<ExpireCommand>> pExpire(Publisher<ExpireCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.PEXPIRE, keyBuf, command.getTimeout().toMillis());
return m.map(v -> new BooleanResponse<>(command, v));
});
}
private static final RedisStrictCommand<Boolean> EXPIREAT = new RedisStrictCommand<Boolean>("EXPIREAT", new BooleanReplayConvertor());
@Override
public Flux<BooleanResponse<ExpireAtCommand>> expireAt(Publisher<ExpireAtCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, EXPIREAT, keyBuf, command.getExpireAt().getEpochSecond());
return m.map(v -> new BooleanResponse<>(command, v));
});
}
@Override
public Flux<BooleanResponse<ExpireAtCommand>> pExpireAt(Publisher<ExpireAtCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.PEXPIREAT, keyBuf, command.getExpireAt().toEpochMilli());
return m.map(v -> new BooleanResponse<>(command, v));
});
}
@Override
public Flux<BooleanResponse<KeyCommand>> persist(Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.PERSIST, keyBuf);
return m.map(v -> new BooleanResponse<>(command, v));
});
}
private static final RedisStrictCommand<Long> TTL = new RedisStrictCommand<Long>("TTL");
@Override
public Flux<NumericResponse<KeyCommand, Long>> ttl(Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Long> m = read(keyBuf, StringCodec.INSTANCE, TTL, keyBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<KeyCommand, Long>> pTtl(Publisher<KeyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Long> m = read(keyBuf, StringCodec.INSTANCE, RedisCommands.PTTL, keyBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<BooleanResponse<MoveCommand>> move(Publisher<MoveCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getDatabase(), "Database must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.MOVE, keyBuf, command.getDatabase());
return m.map(v -> new BooleanResponse<>(command, v));
});
}
private static final RedisStrictCommand<ValueEncoding> OBJECT_ENCODING = new RedisStrictCommand<ValueEncoding>("OBJECT", "ENCODING", new Convertor<ValueEncoding>() {
@Override
public ValueEncoding convert(Object obj) {
return ValueEncoding.of((String) obj);
}
});
@Override
public Mono<ValueEncoding> encodingOf(ByteBuffer key) {
Assert.notNull(key, "Key must not be null!");
byte[] keyBuf = toByteArray(key);
return read(keyBuf, StringCodec.INSTANCE, OBJECT_ENCODING, keyBuf);
}
private static final RedisStrictCommand<Long> OBJECT_IDLETIME = new RedisStrictCommand<Long>("OBJECT", "IDLETIME");
@Override
public Mono<Duration> idletime(ByteBuffer key) {
Assert.notNull(key, "Key must not be null!");
byte[] keyBuf = toByteArray(key);
Mono<Long> m = read(keyBuf, StringCodec.INSTANCE, OBJECT_IDLETIME, keyBuf);
return m.map(Duration::ofSeconds);
}
private static final RedisStrictCommand<Long> OBJECT_REFCOUNT = new RedisStrictCommand<Long>("OBJECT", "REFCOUNT");
@Override
public Mono<Long> refcount(ByteBuffer key) {
Assert.notNull(key, "Key must not be null!");
byte[] keyBuf = toByteArray(key);
return read(keyBuf, StringCodec.INSTANCE, OBJECT_REFCOUNT, keyBuf);
}
@Override
public Flux<BooleanResponse<CopyCommand>> copy(Publisher<CopyCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getTarget(), "Target must not be null!");
List<Object> params = new ArrayList<>();
byte[] keyBuf = toByteArray(command.getKey());
params.add(keyBuf);
byte[] targetBuf = toByteArray(command.getTarget());
params.add(targetBuf);
if (command.getDatabase() != null) {
params.add("DB");
params.add(command.getDatabase());
}
Mono<Boolean> m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.COPY, params.toArray());
return m.map(v -> new BooleanResponse<>(command, v));
});
}
}
| RedissonReactiveKeyCommands |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/transformations/MultipleInputTransformation.java | {
"start": 1192,
"end": 2018
} | class ____<OUT> extends AbstractMultipleInputTransformation<OUT> {
public MultipleInputTransformation(
String name,
StreamOperatorFactory<OUT> operatorFactory,
TypeInformation<OUT> outputType,
int parallelism) {
super(name, operatorFactory, outputType, parallelism);
}
public MultipleInputTransformation(
String name,
StreamOperatorFactory<OUT> operatorFactory,
TypeInformation<OUT> outputType,
int parallelism,
boolean parallelismConfigured) {
super(name, operatorFactory, outputType, parallelism, parallelismConfigured);
}
public MultipleInputTransformation<OUT> addInput(Transformation<?> input) {
inputs.add(input);
return this;
}
}
| MultipleInputTransformation |
java | quarkusio__quarkus | test-framework/common/src/main/java/io/quarkus/test/common/http/TestHTTPResource.java | {
"start": 562,
"end": 1125
} | interface ____ {
/**
* @return The path part of the URL
*/
String value() default "";
/**
* @return If the URL should use the HTTPS protocol and SSL port
* @deprecated use #tls instead
*/
@Deprecated(since = "3.10", forRemoval = true)
boolean ssl() default false;
/**
* @return if the url should use the management interface
*/
boolean management() default false;
/**
* @return If the URL should use the HTTPS protocol and TLS port
*/
boolean tls() default false;
}
| TestHTTPResource |
java | quarkusio__quarkus | independent-projects/bootstrap/maven-resolver/src/test/java/io/quarkus/bootstrap/resolver/maven/test/PreferPomsFromWorkspaceTest.java | {
"start": 515,
"end": 1819
} | class ____ extends BootstrapMavenContextTestBase {
@Test
public void preferPomsFromWorkspace() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-alternate-pom/root/module2",
BootstrapMavenContext.config().setPreferPomsFromWorkspace(true));
final Artifact artifact = new DefaultArtifact(mvn.getCurrentProject().getGroupId(),
mvn.getCurrentProject().getArtifactId(), null, "pom", mvn.getCurrentProject().getVersion());
final ArtifactDescriptorResult descriptor = mvn.getRepositorySystem().readArtifactDescriptor(
mvn.getRepositorySystemSession(),
new ArtifactDescriptorRequest().setArtifact(artifact).setRepositories(mvn.getRemoteRepositories()));
final List<Dependency> managedDeps = descriptor.getManagedDependencies();
assertEquals(1, managedDeps.size());
assertDependency(managedDeps.get(0));
assertEquals(1, descriptor.getDependencies().size());
assertDependency(descriptor.getDependencies().get(0));
}
private void assertDependency(Dependency d) {
assertEquals("acme-other", d.getArtifact().getArtifactId());
assertEquals("1.0", d.getArtifact().getVersion());
}
}
| PreferPomsFromWorkspaceTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/DescribeTopicsOptions.java | {
"start": 943,
"end": 2600
} | class ____ extends AbstractOptions<DescribeTopicsOptions> {
private boolean includeAuthorizedOperations;
private int partitionSizeLimitPerResponse = 2000;
/**
* Set the timeout in milliseconds for this operation or {@code null} if the default api timeout for the
* AdminClient should be used.
*
*/
// This method is retained to keep binary compatibility with 0.11
public DescribeTopicsOptions timeoutMs(Integer timeoutMs) {
this.timeoutMs = timeoutMs;
return this;
}
public DescribeTopicsOptions includeAuthorizedOperations(boolean includeAuthorizedOperations) {
this.includeAuthorizedOperations = includeAuthorizedOperations;
return this;
}
/**
* Sets the maximum number of partitions to be returned in a single response.
* <p>
* <strong>This option:</strong>
* <ul>
* <li>Is only effective when using topic names (not topic IDs).</li>
* <li>Will not be effective if it is larger than the server-side configuration
* {@code max.request.partition.size.limit}.
* </li>
* </ul>
*
* @param partitionSizeLimitPerResponse the maximum number of partitions per response
*/
public DescribeTopicsOptions partitionSizeLimitPerResponse(int partitionSizeLimitPerResponse) {
this.partitionSizeLimitPerResponse = partitionSizeLimitPerResponse;
return this;
}
public boolean includeAuthorizedOperations() {
return includeAuthorizedOperations;
}
public int partitionSizeLimitPerResponse() {
return partitionSizeLimitPerResponse;
}
}
| DescribeTopicsOptions |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/SimpleValueGenerationBaselineTests.java | {
"start": 1612,
"end": 2161
} | class ____ {
@Id
private Integer id;
private String name;
private String lastName;
private java.sql.Date vmCreatedSqlDate;
private Time vmCreatedSqlTime;
private Timestamp vmCreatedSqlTimestamp;
private NonAuditedEntity() {
}
private NonAuditedEntity(Integer id) {
this.id = id;
name = "it";
vmCreatedSqlDate = new java.sql.Date( System.currentTimeMillis() );
vmCreatedSqlTime = new Time( System.currentTimeMillis() );
vmCreatedSqlTimestamp = new Timestamp( System.currentTimeMillis() );
}
}
}
| NonAuditedEntity |
java | netty__netty | testsuite-jpms/src/test/java/io/netty/testsuite_jpms/test/CodecHttp2Test.java | {
"start": 2792,
"end": 8206
} | class ____ {
@Test
public void smokeTest() throws Exception {
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.channel(NioServerSocketChannel.class);
MultiThreadIoEventLoopGroup eventLoopGroup = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
serverBootstrap.group(eventLoopGroup);
serverBootstrap.childHandler(new ChannelInitializer<>() {
@Override
protected void initChannel(Channel ch) {
ch.pipeline().addLast(Http2FrameCodecBuilder.forServer().build(), new ChannelDuplexHandler() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg instanceof Http2HeadersFrame) {
Http2HeadersFrame streamFrame = (Http2HeadersFrame) msg;
if (streamFrame.isEndStream()) {
ByteBuf body = ctx.alloc().buffer();
body.writeCharSequence("Hello World", StandardCharsets.UTF_8);
Http2Headers headers = new DefaultHttp2Headers().status(OK.codeAsText());
Http2FrameStream stream = streamFrame.stream();
ctx.write(new DefaultHttp2HeadersFrame(headers).stream(stream));
ctx.write(new DefaultHttp2DataFrame(body, true).stream(stream));
}
} else {
super.channelRead(ctx, msg);
}
}
});
}
});
ChannelFuture server = serverBootstrap.bind("localhost", 8080).sync();
try {
Bootstrap clientBootstrap = new Bootstrap()
.group(eventLoopGroup)
.channel(NioSocketChannel.class)
.handler(new ChannelInitializer<>() {
@Override
protected void initChannel(Channel ch) {
Http2FrameCodec http2FrameCodec = Http2FrameCodecBuilder.forClient().build();
ch.pipeline().addLast(http2FrameCodec);
ch.pipeline().addLast(new Http2MultiplexHandler(new SimpleChannelInboundHandler<>() {
@Override
protected void channelRead0(ChannelHandlerContext ctx, Object msg) {
}
}));
}
});
CompletableFuture<String> responseFut = new CompletableFuture<>();
Channel client = clientBootstrap.handler(new ChannelInitializer<SocketChannel>() {
Http2Connection connection;
Http2ConnectionHandler connectionHandler;
@Override
protected void initChannel(SocketChannel ch) {
connection = new DefaultHttp2Connection(false);
Http2EventAdapter http2EventHandler = new Http2EventAdapter() {
@Override
public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings)
throws Http2Exception {
Http2Connection.Endpoint<Http2LocalFlowController> endpoint = connection.local();
Http2Stream stream = endpoint.createStream(1, false);
Http2Headers headers = new DefaultHttp2Headers()
.method("GET")
.path("/")
.scheme("http");
ChannelPromise promise = ctx.newPromise();
Http2ConnectionEncoder encoder = connectionHandler.encoder();
encoder.writeHeaders(ctx, 1, headers, 0, true, promise);
}
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data,
int padding, boolean endOfStream) throws Http2Exception {
if (endOfStream) {
responseFut.complete(data.toString(StandardCharsets.UTF_8));
}
return super.onDataRead(ctx, streamId, data, padding, endOfStream);
}
};
connectionHandler = new Http2ConnectionHandlerBuilder()
.frameListener(new DelegatingDecompressorFrameListener(
connection,
http2EventHandler))
.connection(connection)
.build();
ch.pipeline().addLast(connectionHandler);
}
}).connect("localhost", 8080).syncUninterruptibly().channel();
String resp = responseFut.get(20, TimeUnit.SECONDS);
assertEquals("Hello World", resp);
// Wait until the connection is closed.
server.channel().close().syncUninterruptibly();
client.close().syncUninterruptibly();
} finally {
eventLoopGroup.shutdownGracefully();
}
}
}
| CodecHttp2Test |
java | apache__logging-log4j2 | log4j-api-test/src/main/java/org/apache/logging/log4j/test/junit/TestPropertySource.java | {
"start": 1150,
"end": 3303
} | class ____ implements PropertySource {
private static final String PREFIX = "log4j2.";
private static final Namespace NAMESPACE = ExtensionContextAnchor.LOG4J2_NAMESPACE.append("properties");
private static final TestProperties EMPTY_PROPERTIES = new EmptyTestProperties();
@Override
public int getPriority() {
// Highest priority
return Integer.MIN_VALUE;
}
public static TestProperties createProperties(final ExtensionContext context) {
TestProperties props = getProperties(context);
// Make sure that the properties do not come from the parent ExtensionContext
if (props instanceof JUnitTestProperties && context.equals(((JUnitTestProperties) props).getContext())) {
return props;
}
props = new JUnitTestProperties(context);
ExtensionContextAnchor.setAttribute(TestProperties.class, props, context);
return props;
}
public static TestProperties getProperties() {
return getProperties(null);
}
private static TestProperties getProperties(final ExtensionContext context) {
final ExtensionContext actualContext = context != null ? context : ExtensionContextAnchor.getContext();
if (actualContext != null) {
final TestProperties props =
ExtensionContextAnchor.getAttribute(TestProperties.class, TestProperties.class, actualContext);
if (props != null) {
return props;
}
}
return EMPTY_PROPERTIES;
}
@Override
public CharSequence getNormalForm(final Iterable<? extends CharSequence> tokens) {
final CharSequence camelCase = Util.joinAsCamelCase(tokens);
// Do not use Strings to prevent recursive initialization
return camelCase.length() > 0 ? PREFIX + camelCase.toString() : null;
}
@Override
public String getProperty(final String key) {
return getProperties().getProperty(key);
}
@Override
public boolean containsProperty(final String key) {
return getProperties().containsProperty(key);
}
private static | TestPropertySource |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/internal/DefaultPostLoadEventListener.java | {
"start": 798,
"end": 2215
} | class ____ implements PostLoadEventListener, CallbackRegistryConsumer {
private CallbackRegistry callbackRegistry;
@Override
public void injectCallbackRegistry(CallbackRegistry callbackRegistry) {
this.callbackRegistry = callbackRegistry;
}
@Override
public void onPostLoad(PostLoadEvent event) {
final Object entity = event.getEntity();
callbackRegistry.postLoad( entity );
final var session = event.getSession();
final var entry = session.getPersistenceContextInternal().getEntry( entity );
if ( entry == null ) {
throw new AssertionFailure( "possible non-threadsafe access to the session" );
}
final var lockMode = entry.getLockMode();
if ( lockMode.requiresVersion() ) {
final var persister = entry.getPersister();
if ( persister.isVersioned() ) {
switch ( lockMode ) {
case PESSIMISTIC_FORCE_INCREMENT:
OptimisticLockHelper.forceVersionIncrement( entity, entry, session );
break;
case OPTIMISTIC_FORCE_INCREMENT:
session.getActionQueue().registerCallback( new EntityIncrementVersionProcess( entity ) );
break;
case OPTIMISTIC:
session.getActionQueue().registerCallback( new EntityVerifyVersionProcess( entity ) );
break;
}
}
else {
throw new HibernateException("[" + lockMode
+ "] not supported for non-versioned entities [" + persister.getEntityName() + "]");
}
}
}
}
| DefaultPostLoadEventListener |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/PushExpressionToLoadIT.java | {
"start": 2345,
"end": 34363
} | class ____ extends ESRestTestCase {
@ClassRule
public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test"));
@Rule(order = Integer.MIN_VALUE)
public ProfileLogger profileLogger = new ProfileLogger();
@Before
public void checkPushCapability() throws IOException {
assumeTrue(
"requires " + EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.capabilityName(),
clusterHasCapability(
"POST",
"_query",
List.of(),
List.of(EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.capabilityName())
).orElseGet(() -> false)
);
}
public void testLengthToKeyword() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("keyword"),
b -> b.field("test", value),
"| EVAL test = LENGTH(test)",
matchesList().item(value.length()),
matchesMap().entry("test:column_at_a_time:Utf8CodePointsFromOrds.Singleton", 1)
);
}
/**
* We don't support fusing {@code LENGTH} into loading {@code wildcard} fields because
* we haven't written support for fusing functions to loading from its source format.
* We haven't done that because {@code wildcard} fields aren't super common.
*/
public void testLengthNotPushedToWildcard() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("wildcard"),
b -> b.field("test", value),
"| EVAL test = LENGTH(test)",
matchesList().item(value.length()),
matchesMap().entry("test:column_at_a_time:BlockDocValuesReader.BytesCustom", 1)
);
}
/**
* We don't support fusing {@code LENGTH} into loading {@code text} fields because
* we haven't written support for fusing functions to loading from {@code _source}.
* Usually folks that want to go superfast will use doc values. But those aren't
* even available for {@code text} fields.
*/
public void testLengthNotPushedToText() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("text"),
b -> b.field("test", value),
"| EVAL test = LENGTH(test)",
matchesList().item(value.length()),
matchesMap().entry("test:column_at_a_time:null", 1)
.entry("stored_fields[requires_source:true, fields:0, sequential: false]", 1)
.entry("test:row_stride:BlockSourceReader.Bytes", 1)
);
}
public void testMvMinToKeyword() throws IOException {
String min = "a".repeat(between(1, 256));
String max = "b".repeat(between(1, 256));
test(
justType("keyword"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinBytesRefsFromOrds.SortedSet", 1)
);
}
public void testMvMinToIp() throws IOException {
String min = "192.168.0." + between(0, 255);
String max = "192.168.3." + between(0, 255);
test(
justType("ip"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinBytesRefsFromOrds.SortedSet", 1)
);
}
public void testMvMinToHalfFloat() throws IOException {
double min = randomDouble();
double max = 1 + randomDouble();
test(
justType("half_float"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(closeTo(min, .1)),
matchesMap().entry("test:column_at_a_time:MvMinDoublesFromDocValues.Sorted", 1)
);
}
public void testMvMinToFloat() throws IOException {
double min = randomDouble();
double max = 1 + randomDouble();
test(
justType("float"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(closeTo(min, .1)),
matchesMap().entry("test:column_at_a_time:MvMinDoublesFromDocValues.Sorted", 1)
);
}
public void testMvMinToDouble() throws IOException {
double min = randomDouble();
double max = 1 + randomDouble();
test(
justType("double"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinDoublesFromDocValues.Sorted", 1)
);
}
public void testMvMinToByte() throws IOException {
int min = between(Byte.MIN_VALUE, Byte.MAX_VALUE - 10);
int max = between(min + 1, Byte.MAX_VALUE);
test(
justType("byte"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinIntsFromDocValues.Sorted", 1)
);
}
public void testMvMinToShort() throws IOException {
int min = between(Short.MIN_VALUE, Short.MAX_VALUE - 10);
int max = between(min + 1, Short.MAX_VALUE);
test(
justType("short"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinIntsFromDocValues.Sorted", 1)
);
}
public void testMvMinToInt() throws IOException {
int min = between(Integer.MIN_VALUE, Integer.MAX_VALUE - 10);
int max = between(min + 1, Integer.MAX_VALUE);
test(
justType("integer"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinIntsFromDocValues.Sorted", 1)
);
}
public void testMvMinToLong() throws IOException {
long min = randomLongBetween(Long.MIN_VALUE, Long.MAX_VALUE - 10);
long max = randomLongBetween(min + 1, Long.MAX_VALUE);
test(
justType("long"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MIN(test)",
matchesList().item(min),
matchesMap().entry("test:column_at_a_time:MvMinLongsFromDocValues.Sorted", 1)
);
}
public void testMvMaxToKeyword() throws IOException {
String min = "a".repeat(between(1, 256));
String max = "b".repeat(between(1, 256));
test(
justType("keyword"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxBytesRefsFromOrds.SortedSet", 1)
);
}
public void testMvMaxToIp() throws IOException {
String min = "192.168.0." + between(0, 255);
String max = "192.168.3." + between(0, 255);
test(
justType("ip"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxBytesRefsFromOrds.SortedSet", 1)
);
}
public void testMvMaxToByte() throws IOException {
int min = between(Byte.MIN_VALUE, Byte.MAX_VALUE - 10);
int max = between(min + 1, Byte.MAX_VALUE);
test(
justType("byte"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxIntsFromDocValues.Sorted", 1)
);
}
public void testMvMaxToShort() throws IOException {
int min = between(Short.MIN_VALUE, Short.MAX_VALUE - 10);
int max = between(min + 1, Short.MAX_VALUE);
test(
justType("short"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxIntsFromDocValues.Sorted", 1)
);
}
public void testMvMaxToInt() throws IOException {
int min = between(Integer.MIN_VALUE, Integer.MAX_VALUE - 10);
int max = between(min + 1, Integer.MAX_VALUE);
test(
justType("integer"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxIntsFromDocValues.Sorted", 1)
);
}
public void testMvMaxToLong() throws IOException {
long min = randomLongBetween(Long.MIN_VALUE, Long.MAX_VALUE - 10);
long max = randomLongBetween(min + 1, Long.MAX_VALUE);
test(
justType("long"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxLongsFromDocValues.Sorted", 1)
);
}
public void testMvMaxToHalfFloat() throws IOException {
double min = randomDouble();
double max = 1 + randomDouble();
test(
justType("half_float"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(closeTo(max, .1)),
matchesMap().entry("test:column_at_a_time:MvMaxDoublesFromDocValues.Sorted", 1)
);
}
public void testMvMaxToFloat() throws IOException {
double min = randomDouble();
double max = 1 + randomDouble();
test(
justType("float"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(closeTo(max, .1)),
matchesMap().entry("test:column_at_a_time:MvMaxDoublesFromDocValues.Sorted", 1)
);
}
public void testMvMaxToDouble() throws IOException {
double min = randomDouble();
double max = 1 + randomDouble();
test(
justType("double"),
b -> b.startArray("test").value(min).value(max).endArray(),
"| EVAL test = MV_MAX(test)",
matchesList().item(max),
matchesMap().entry("test:column_at_a_time:MvMaxDoublesFromDocValues.Sorted", 1)
);
}
public void testVCosine() throws IOException {
test(
justType("dense_vector"),
b -> b.startArray("test").value(128).value(128).value(0).endArray(),
"| EVAL test = V_COSINE(test, [0, 255, 255])",
matchesList().item(0.5),
matchesMap().entry("test:column_at_a_time:FloatDenseVectorFromDocValues.Normalized.V_COSINE", 1)
);
}
public void testVHammingToByte() throws IOException {
test(
b -> b.startObject("test").field("type", "dense_vector").field("element_type", "byte").endObject(),
b -> b.startArray("test").value(100).value(100).value(0).endArray(),
"| EVAL test = V_HAMMING(test, [0, 100, 100])",
matchesList().item(6.0),
matchesMap().entry("test:column_at_a_time:ByteDenseVectorFromDocValues.V_HAMMING", 1)
);
}
public void testVHammingToBit() throws IOException {
test(
b -> b.startObject("test").field("type", "dense_vector").field("element_type", "bit").endObject(),
b -> b.startArray("test").value(100).value(100).value(0).endArray(),
"| EVAL test = V_HAMMING(test, [0, 100, 100])",
matchesList().item(6.0),
matchesMap().entry("test:column_at_a_time:BitDenseVectorFromDocValues.V_HAMMING", 1)
);
}
//
// Tests for more complex shapes.
//
/**
* Tests {@code LENGTH} on a field that comes from a {@code LOOKUP JOIN}.
*/
public void testLengthNotPushedToLookupJoinKeyword() throws IOException {
initLookupIndex();
test(
b -> b.startObject("main_matching").field("type", "keyword").endObject(),
b -> b.field("main_matching", "lookup"),
"""
| LOOKUP JOIN lookup ON matching == main_matching
| EVAL test = LENGTH(test)
""",
matchesList().item(1),
matchesMap().entry("main_matching:column_at_a_time:BytesRefsFromOrds.Singleton", 1),
sig -> assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator") // the real work is here, checkOperatorProfile checks the status
.item("LookupOperator")
.item("EvalOperator") // this one just renames the field
.item("AggregationOperator")
.item("ExchangeSinkOperator")
)
);
}
/**
* Tests {@code LENGTH} on a field that comes from a {@code LOOKUP JOIN} with
* the added complexity that the field also exists in the index, but we're not
* querying it.
*/
public void testLengthNotPushedToLookupJoinKeywordSameName() throws IOException {
String value = "v".repeat(between(0, 256));
initLookupIndex();
test(b -> {
b.startObject("test").field("type", "keyword").endObject();
b.startObject("main_matching").field("type", "keyword").endObject();
},
b -> b.field("test", value).field("main_matching", "lookup"),
"""
| DROP test
| LOOKUP JOIN lookup ON matching == main_matching
| EVAL test = LENGTH(test)
""",
matchesList().item(1),
matchesMap().entry("main_matching:column_at_a_time:BytesRefsFromOrds.Singleton", 1),
sig -> assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator") // the real work is here, checkOperatorProfile checks the status
.item("LookupOperator")
.item("EvalOperator") // this one just renames the field
.item("AggregationOperator")
.item("ExchangeSinkOperator")
)
);
}
/**
* Tests {@code LENGTH} on a field that comes from a {@code LOOKUP JOIN}.
*/
public void testLengthPushedInsideInlineStats() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("keyword"),
b -> b.field("test", value),
"""
| INLINE STATS max_length = MAX(LENGTH(test))
| EVAL test = LENGTH(test)
| WHERE test == max_length
""",
matchesList().item(value.length()),
matchesMap().entry("test:column_at_a_time:Utf8CodePointsFromOrds.Singleton", 1),
sig -> {
// There are two data node plans, one for each phase.
if (sig.contains("FilterOperator")) {
assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator") // the real work is here, checkOperatorProfile checks the status
.item("FilterOperator")
.item("EvalOperator") // this one just renames the field
.item("AggregationOperator")
.item("ExchangeSinkOperator")
);
} else {
assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator") // the real work is here, checkOperatorProfile checks the status
.item("EvalOperator") // this one just renames the field
.item("AggregationOperator")
.item("ExchangeSinkOperator")
);
}
}
);
}
/**
* Tests {@code LENGTH} on a field that comes from a {@code LOOKUP JOIN}.
*/
public void testLengthNotPushedToInlineStatsResults() throws IOException {
String value = "v".repeat(between(0, 256));
test(justType("keyword"), b -> b.field("test", value), """
| INLINE STATS test2 = VALUES(test)
| EVAL test = LENGTH(test2)
""", matchesList().item(value.length()), matchesMap().entry("test:column_at_a_time:BytesRefsFromOrds.Singleton", 1), sig -> {
// There are two data node plans, one for each phase.
if (sig.contains("EvalOperator")) {
assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("EvalOperator") // The second phase of the INLINE STATS
.item("AggregationOperator")
.item("ExchangeSinkOperator")
);
} else {
assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator")
.item("AggregationOperator")
.item("ExchangeSinkOperator")
);
}
});
}
/**
* Tests {@code LENGTH} on a field that comes from a {@code LOOKUP JOIN}.
*/
public void testLengthNotPushedToGroupedInlineStatsResults() throws IOException {
String value = "v".repeat(between(0, 256));
CheckedConsumer<XContentBuilder, IOException> mapping = b -> {
b.startObject("test").field("type", "keyword").endObject();
b.startObject("group").field("type", "keyword").endObject();
};
test(mapping, b -> b.field("test", value).field("group", "g"), """
| INLINE STATS test2 = VALUES(test) BY group
| EVAL test = LENGTH(test2)
""", matchesList().item(value.length()), matchesMap().extraOk(), sig -> {
// There are two data node plans, one for each phase.
if (sig.contains("EvalOperator")) {
assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator")
.item("RowInTableLookup")
.item("ColumnLoad")
.item("ProjectOperator")
.item("EvalOperator")
.item("AggregationOperator")
.item("ExchangeSinkOperator")
);
} else {
assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator")
.item("HashAggregationOperator")
.item("ExchangeSinkOperator")
);
}
});
}
/**
* LENGTH not pushed when on a fork branch.
*/
public void testLengthNotPushedToFork() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("keyword"),
b -> b.field("test", value),
"""
| FORK
(EVAL test = LENGTH(test) + 1)
(EVAL test = LENGTH(test) + 2)
""",
matchesList().item(List.of(value.length() + 1, value.length() + 2)),
matchesMap().entry("test:column_at_a_time:BytesRefsFromOrds.Singleton", 1),
sig -> assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator")
.item("ProjectOperator")
.item("ExchangeSinkOperator")
)
);
}
public void testLengthNotPushedBeforeFork() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("keyword"),
b -> b.field("test", value),
"""
| EVAL test = LENGTH(test)
| FORK
(EVAL j = 1)
(EVAL j = 2)
""",
matchesList().item(value.length()),
matchesMap().entry("test:column_at_a_time:BytesRefsFromOrds.Singleton", 1),
sig -> assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator")
.item("ProjectOperator")
.item("ExchangeSinkOperator")
)
);
}
public void testLengthNotPushedAfterFork() throws IOException {
String value = "v".repeat(between(0, 256));
test(
justType("keyword"),
b -> b.field("test", value),
"""
| FORK
(EVAL j = 1)
(EVAL j = 2)
| EVAL test = LENGTH(test)
""",
matchesList().item(value.length()),
matchesMap().entry("test:column_at_a_time:BytesRefsFromOrds.Singleton", 1),
sig -> assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator")
.item("ProjectOperator")
.item("ExchangeSinkOperator")
)
);
}
private void test(
CheckedConsumer<XContentBuilder, IOException> mapping,
CheckedConsumer<XContentBuilder, IOException> doc,
String eval,
Matcher<?> expectedValue,
MapMatcher expectedLoaders
) throws IOException {
test(
mapping,
doc,
eval,
expectedValue,
expectedLoaders,
sig -> assertMap(
sig,
matchesList().item("LuceneSourceOperator")
.item("ValuesSourceReaderOperator") // the real work is here, checkOperatorProfile checks the status
.item("EvalOperator") // this one just renames the field
.item("AggregationOperator")
.item("ExchangeSinkOperator")
)
);
}
private void test(
CheckedConsumer<XContentBuilder, IOException> mapping,
CheckedConsumer<XContentBuilder, IOException> doc,
String eval,
Matcher<?> expectedValue,
MapMatcher expectedLoaders,
Consumer<List<String>> assertDataNodeSig
) throws IOException {
indexValue(mapping, doc);
RestEsqlTestCase.RequestObjectBuilder builder = requestObjectBuilder().query("""
FROM test
""" + eval + """
| STATS test = MV_SORT(VALUES(test))
""");
/*
* TODO if you just do KEEP test then the load is in the data node reduce driver and not merged:
* \_ProjectExec[[test{f}#7]]
* \_FieldExtractExec[test{f}#7]<[],[]>
* \_EsQueryExec[test], indexMode[standard]]
* \_ExchangeSourceExec[[test{f}#7],false]}, {cluster_name=test-cluster, node_name=test-cluster-0, descrip
* \_ProjectExec[[test{r}#3]]
* \_EvalExec[[LENGTH(test{f}#7) AS test#3]]
* \_LimitExec[1000[INTEGER],50]
* \_ExchangeSourceExec[[test{f}#7],false]}], query={to
*/
builder.profile(true);
Map<String, Object> result = runEsql(builder, new AssertWarnings.NoWarnings(), profileLogger, RestEsqlTestCase.Mode.SYNC);
assertResultMap(
result,
getResultMatcher(result).entry(
"profile",
matchesMap() //
.entry("drivers", instanceOf(List.class))
.entry("plans", instanceOf(List.class))
.entry("planning", matchesMap().extraOk())
.entry("query", matchesMap().extraOk())
),
matchesList().item(matchesMap().entry("name", "test").entry("type", any(String.class))),
matchesList().item(expectedValue)
);
@SuppressWarnings("unchecked")
List<Map<String, Object>> profiles = (List<Map<String, Object>>) ((Map<String, Object>) result.get("profile")).get("drivers");
for (Map<String, Object> p : profiles) {
fixTypesOnProfile(p);
assertThat(p, commonProfile());
List<String> sig = new ArrayList<>();
@SuppressWarnings("unchecked")
List<Map<String, Object>> operators = (List<Map<String, Object>>) p.get("operators");
for (Map<String, Object> o : operators) {
sig.add(checkOperatorProfile(o, expectedLoaders));
}
String description = p.get("description").toString();
switch (description) {
case "data" -> {
logger.info("data {}", sig);
assertDataNodeSig.accept(sig);
}
case "node_reduce" -> logger.info("node_reduce {}", sig);
case "final" -> logger.info("final {}", sig);
case "main.final" -> logger.info("main final {}", sig);
case "subplan-0.final" -> logger.info("subplan-0 final {}", sig);
case "subplan-1.final" -> logger.info("subplan-1 final {}", sig);
default -> throw new IllegalArgumentException("can't match " + description);
}
}
}
private void indexValue(CheckedConsumer<XContentBuilder, IOException> mapping, CheckedConsumer<XContentBuilder, IOException> doc)
throws IOException {
try {
// Delete the index if it has already been created.
client().performRequest(new Request("DELETE", "test"));
} catch (ResponseException e) {
if (e.getResponse().getStatusLine().getStatusCode() != 404) {
throw e;
}
}
Request createIndex = new Request("PUT", "test");
try (XContentBuilder config = JsonXContent.contentBuilder()) {
config.startObject();
config.startObject("settings");
{
config.startObject("index");
config.field("number_of_shards", 1);
config.endObject();
}
config.endObject();
config.startObject("mappings");
{
config.startObject("properties");
mapping.accept(config);
config.endObject();
}
config.endObject();
createIndex.setJsonEntity(Strings.toString(config.endObject()));
}
Response createResponse = client().performRequest(createIndex);
assertThat(
entityToMap(createResponse.getEntity(), XContentType.JSON),
matchesMap().entry("shards_acknowledged", true).entry("index", "test").entry("acknowledged", true)
);
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "");
try (XContentBuilder docJson = JsonXContent.contentBuilder()) {
docJson.startObject();
doc.accept(docJson);
docJson.endObject();
bulk.setJsonEntity("""
{"create":{"_index":"test"}}
""" + Strings.toString(docJson) + "\n");
}
Response bulkResponse = client().performRequest(bulk);
assertThat(entityToMap(bulkResponse.getEntity(), XContentType.JSON), matchesMap().entry("errors", false).extraOk());
}
private void initLookupIndex() throws IOException {
if (indexExists("lookup")) {
return;
}
Request createIndex = new Request("PUT", "lookup");
try (XContentBuilder config = JsonXContent.contentBuilder()) {
config.startObject();
config.startObject("settings");
{
config.startObject("index");
config.field("number_of_shards", 1);
config.field("mode", "lookup");
config.endObject();
}
config.endObject();
config.startObject("mappings");
{
config.startObject("properties");
config.startObject("matching").field("type", "keyword").endObject();
config.startObject("test").field("type", "keyword").endObject();
config.endObject();
}
config.endObject();
createIndex.setJsonEntity(Strings.toString(config.endObject()));
}
Response createResponse = client().performRequest(createIndex);
assertThat(
entityToMap(createResponse.getEntity(), XContentType.JSON),
matchesMap().entry("shards_acknowledged", true).entry("index", "lookup").entry("acknowledged", true)
);
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "");
bulk.setJsonEntity("""
{"create":{"_index":"lookup"}}
{"test": "a", "matching": "lookup"}
""");
Response bulkResponse = client().performRequest(bulk);
assertThat(entityToMap(bulkResponse.getEntity(), XContentType.JSON), matchesMap().entry("errors", false).extraOk());
}
private CheckedConsumer<XContentBuilder, IOException> justType(String type) {
return b -> b.startObject("test").field("type", type).endObject();
}
private static String checkOperatorProfile(Map<String, Object> o, MapMatcher expectedLoaders) {
String name = (String) o.get("operator");
name = PushQueriesIT.TO_NAME.matcher(name).replaceAll("");
if (name.equals("ValuesSourceReaderOperator")) {
MapMatcher expectedOp = matchesMap().entry("operator", startsWith(name))
.entry("status", matchesMap().entry("readers_built", expectedLoaders).extraOk());
assertMap(o, expectedOp);
}
return name;
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@Override
protected boolean preserveClusterUponCompletion() {
// Preserve the cluser to speed up the semantic_text tests
return true;
}
private static boolean setupEmbeddings = false;
private void setUpTextEmbeddingInferenceEndpoint() throws IOException {
setupEmbeddings = true;
Request request = new Request("PUT", "_inference/text_embedding/test");
request.setJsonEntity("""
{
"service": "text_embedding_test_service",
"service_settings": {
"model": "my_model",
"api_key": "abc64",
"dimensions": 128
},
"task_settings": {
}
}
""");
adminClient().performRequest(request);
}
}
| PushExpressionToLoadIT |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/UberJarFormatWorksTest.java | {
"start": 532,
"end": 2000
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testUberJarFormatWorks() throws Exception {
final File projectDir = getProjectDir("test-uber-jar-format-works");
runGradleWrapper(projectDir, "clean", "build");
final Path quarkusApp = projectDir.toPath().resolve("build");
assertThat(quarkusApp).exists();
Path jar = quarkusApp.resolve("uber-jar-test-1.0.0-SNAPSHOT-runner.jar");
assertThat(jar).exists();
File output = new File(projectDir, "build/output.log");
output.createNewFile();
DevModeClient devModeClient = new DevModeClient();
Process process = launch(jar, output);
try {
// Wait until server up
dumpFileContentOnFailure(() -> {
await()
.pollDelay(1, TimeUnit.SECONDS)
.atMost(1, TimeUnit.MINUTES)
.until(() -> devModeClient.isCode("/hello", 200));
return null;
}, output, ConditionTimeoutException.class);
String logs = FileUtils.readFileToString(output, "UTF-8");
assertThat(logs).contains("INFO").contains("cdi, resteasy");
// test that the application name and version are properly set
assertThat(devModeClient.getHttpResponse("/hello")).isEqualTo("hello");
} finally {
process.destroy();
}
}
}
| UberJarFormatWorksTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleBulkInfo.java | {
"start": 774,
"end": 4624
} | class ____ statistics collected by the downsampling task
* for bulk indexing operations.
*/
public record DownsampleBulkInfo(
long totalBulkCount,
long bulkIngestSumMillis,
long maxBulkIngestMillis,
long minBulkIngestMillis,
long bulkTookSumMillis,
long maxBulkTookMillis,
long minBulkTookMillis
) implements NamedWriteable, ToXContentObject {
public static final String NAME = "rollup_bulk_info";
private static final ParseField TOTAL_BULK_COUNT = new ParseField("total_bulk_count");
private static final ParseField BULK_INGEST_SUM_MILLIS = new ParseField("bulk_ingest_sum_millis");
private static final ParseField MAX_BULK_INGEST_MILLIS = new ParseField("max_bulk_ingest_millis");
private static final ParseField MIN_BULK_INGEST_MILLIS = new ParseField("min_bulk_ingest_millis");
private static final ParseField BULK_TOOK_SUM_MILLIS = new ParseField("bulk_took_sum_millis");
private static final ParseField MAX_BULK_TOOK_MILLIS = new ParseField("max_bulk_took_millis");
private static final ParseField MIN_BULK_TOOK_MILLIS = new ParseField("min_bulk_took_millis");
private static final ConstructingObjectParser<DownsampleBulkInfo, Void> PARSER;
static {
PARSER = new ConstructingObjectParser<>(
NAME,
args -> new DownsampleBulkInfo(
(Long) args[0],
(Long) args[1],
(Long) args[2],
(Long) args[3],
(Long) args[4],
(Long) args[5],
(Long) args[6]
)
);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_BULK_COUNT);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BULK_INGEST_SUM_MILLIS);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAX_BULK_INGEST_MILLIS);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), MIN_BULK_INGEST_MILLIS);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BULK_TOOK_SUM_MILLIS);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAX_BULK_TOOK_MILLIS);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), MIN_BULK_TOOK_MILLIS);
}
public DownsampleBulkInfo(final StreamInput in) throws IOException {
this(in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong());
}
public static DownsampleBulkInfo fromXContext(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(TOTAL_BULK_COUNT.getPreferredName(), totalBulkCount);
builder.field(BULK_INGEST_SUM_MILLIS.getPreferredName(), bulkIngestSumMillis);
builder.field(MAX_BULK_INGEST_MILLIS.getPreferredName(), maxBulkIngestMillis);
builder.field(MIN_BULK_INGEST_MILLIS.getPreferredName(), minBulkIngestMillis);
builder.field(BULK_TOOK_SUM_MILLIS.getPreferredName(), bulkTookSumMillis);
builder.field(MAX_BULK_TOOK_MILLIS.getPreferredName(), maxBulkTookMillis);
builder.field(MIN_BULK_TOOK_MILLIS.getPreferredName(), minBulkTookMillis);
return builder.endObject();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(totalBulkCount);
out.writeVLong(bulkIngestSumMillis);
out.writeVLong(maxBulkIngestMillis);
out.writeVLong(minBulkIngestMillis);
out.writeVLong(bulkTookSumMillis);
out.writeVLong(maxBulkTookMillis);
out.writeVLong(minBulkTookMillis);
}
}
| includes |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/DefaultRequestToViewNameTranslator.java | {
"start": 2366,
"end": 5564
} | class ____ implements RequestToViewNameTranslator {
private static final String SLASH = "/";
private String prefix = "";
private String suffix = "";
private String separator = SLASH;
private boolean stripLeadingSlash = true;
private boolean stripTrailingSlash = true;
private boolean stripExtension = true;
/**
* Set the prefix to prepend to generated view names.
* @param prefix the prefix to prepend to generated view names
*/
public void setPrefix(@Nullable String prefix) {
this.prefix = (prefix != null ? prefix : "");
}
/**
* Set the suffix to append to generated view names.
* @param suffix the suffix to append to generated view names
*/
public void setSuffix(@Nullable String suffix) {
this.suffix = (suffix != null ? suffix : "");
}
/**
* Set the value that will replace '{@code /}' as the separator
* in the view name. The default behavior simply leaves '{@code /}'
* as the separator.
*/
public void setSeparator(String separator) {
this.separator = separator;
}
/**
* Set whether leading slashes should be stripped from the URI when
* generating the view name. Default is "true".
*/
public void setStripLeadingSlash(boolean stripLeadingSlash) {
this.stripLeadingSlash = stripLeadingSlash;
}
/**
* Set whether trailing slashes should be stripped from the URI when
* generating the view name. Default is "true".
*/
public void setStripTrailingSlash(boolean stripTrailingSlash) {
this.stripTrailingSlash = stripTrailingSlash;
}
/**
* Set whether file extensions should be stripped from the URI when
* generating the view name. Default is "true".
*/
public void setStripExtension(boolean stripExtension) {
this.stripExtension = stripExtension;
}
/**
* Translates the request URI of the incoming {@link HttpServletRequest}
* into the view name based on the configured parameters.
* @throws IllegalArgumentException if neither a parsed RequestPath, nor a
* String lookupPath have been resolved and cached as a request attribute.
* @see ServletRequestPathUtils#getCachedPath(ServletRequest)
* @see #transformPath
*/
@Override
public String getViewName(HttpServletRequest request) {
String path = ServletRequestPathUtils.getCachedPathValue(request);
return (this.prefix + transformPath(path) + this.suffix);
}
/**
* Transform the request URI (in the context of the webapp) stripping
* slashes and extensions, and replacing the separator as required.
* @param lookupPath the lookup path for the current request,
* as determined by the UrlPathHelper
* @return the transformed path, with slashes and extensions stripped
* if desired
*/
protected @Nullable String transformPath(String lookupPath) {
String path = lookupPath;
if (this.stripLeadingSlash && path.startsWith(SLASH)) {
path = path.substring(1);
}
if (this.stripTrailingSlash && path.endsWith(SLASH)) {
path = path.substring(0, path.length() - 1);
}
if (this.stripExtension) {
path = StringUtils.stripFilenameExtension(path);
}
if (!SLASH.equals(this.separator)) {
path = StringUtils.replace(path, SLASH, this.separator);
}
return path;
}
}
| DefaultRequestToViewNameTranslator |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/filter/GenericFilterBean.java | {
"start": 11927,
"end": 13254
} | class ____ extends MutablePropertyValues {
/**
* Create new FilterConfigPropertyValues.
* @param config the FilterConfig we'll use to take PropertyValues from
* @param requiredProperties set of property names we need, where
* we can't accept default values
* @throws ServletException if any required properties are missing
*/
public FilterConfigPropertyValues(FilterConfig config, Set<String> requiredProperties)
throws ServletException {
Set<String> missingProps = (!CollectionUtils.isEmpty(requiredProperties) ?
new HashSet<>(requiredProperties) : null);
Enumeration<String> paramNames = config.getInitParameterNames();
while (paramNames.hasMoreElements()) {
String property = paramNames.nextElement();
Object value = config.getInitParameter(property);
addPropertyValue(new PropertyValue(property, value));
if (missingProps != null) {
missingProps.remove(property);
}
}
// Fail if we are still missing properties.
if (!CollectionUtils.isEmpty(missingProps)) {
throw new ServletException(
"Initialization from FilterConfig for filter '" + config.getFilterName() +
"' failed; the following required properties were missing: " +
StringUtils.collectionToDelimitedString(missingProps, ", "));
}
}
}
}
| FilterConfigPropertyValues |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/compatible/jsonlib/CompatibleTest0.java | {
"start": 4887,
"end": 5598
} | class ____ {
private float f1;
private double f2;
private Float f3;
private Double f4;
public float getF1() {
return f1;
}
public void setF1(float f1) {
this.f1 = f1;
}
public double getF2() {
return f2;
}
public void setF2(double f2) {
this.f2 = f2;
}
public Float getF3() {
return f3;
}
public void setF3(Float f3) {
this.f3 = f3;
}
public Double getF4() {
return f4;
}
public void setF4(Double f4) {
this.f4 = f4;
}
}
public static | V2 |
java | spring-projects__spring-boot | module/spring-boot-session/src/test/java/org/springframework/boot/session/actuate/endpoint/SessionsEndpointTests.java | {
"start": 1346,
"end": 4176
} | class ____ {
private static final Session session = new MapSession();
@SuppressWarnings("unchecked")
private final SessionRepository<Session> sessionRepository = mock(SessionRepository.class);
@SuppressWarnings("unchecked")
private final FindByIndexNameSessionRepository<Session> indexedSessionRepository = mock(
FindByIndexNameSessionRepository.class);
private final SessionsEndpoint endpoint = new SessionsEndpoint(this.sessionRepository,
this.indexedSessionRepository);
@Test
void sessionsForUsername() {
given(this.indexedSessionRepository.findByPrincipalName("user"))
.willReturn(Collections.singletonMap(session.getId(), session));
SessionsDescriptor sessions = this.endpoint.sessionsForUsername("user");
assertThat(sessions).isNotNull();
List<SessionDescriptor> result = sessions.getSessions();
assertThat(result).hasSize(1);
assertThat(result.get(0).getId()).isEqualTo(session.getId());
assertThat(result.get(0).getAttributeNames()).isEqualTo(session.getAttributeNames());
assertThat(result.get(0).getCreationTime()).isEqualTo(session.getCreationTime());
assertThat(result.get(0).getLastAccessedTime()).isEqualTo(session.getLastAccessedTime());
assertThat(result.get(0).getMaxInactiveInterval()).isEqualTo(session.getMaxInactiveInterval().getSeconds());
assertThat(result.get(0).isExpired()).isEqualTo(session.isExpired());
then(this.indexedSessionRepository).should().findByPrincipalName("user");
}
@Test
void sessionsForUsernameWhenNoIndexedRepository() {
SessionsEndpoint endpoint = new SessionsEndpoint(this.sessionRepository, null);
assertThat(endpoint.sessionsForUsername("user")).isNull();
}
@Test
void getSession() {
given(this.sessionRepository.findById(session.getId())).willReturn(session);
SessionDescriptor result = this.endpoint.getSession(session.getId());
assertThat(result).isNotNull();
assertThat(result.getId()).isEqualTo(session.getId());
assertThat(result.getAttributeNames()).isEqualTo(session.getAttributeNames());
assertThat(result.getCreationTime()).isEqualTo(session.getCreationTime());
assertThat(result.getLastAccessedTime()).isEqualTo(session.getLastAccessedTime());
assertThat(result.getMaxInactiveInterval()).isEqualTo(session.getMaxInactiveInterval().getSeconds());
assertThat(result.isExpired()).isEqualTo(session.isExpired());
then(this.sessionRepository).should().findById(session.getId());
}
@Test
void getSessionWithIdNotFound() {
given(this.sessionRepository.findById("not-found")).willReturn(null);
assertThat(this.endpoint.getSession("not-found")).isNull();
then(this.sessionRepository).should().findById("not-found");
}
@Test
void deleteSession() {
this.endpoint.deleteSession(session.getId());
then(this.sessionRepository).should().deleteById(session.getId());
}
}
| SessionsEndpointTests |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 32216,
"end": 32666
} | class ____ in the default package, return an empty string.
* </p>
*
* @param name the name to get the package name for, may be {@code null}.
* @return the package name or an empty string.
* @since 2.4
*/
public static String getPackageCanonicalName(final String name) {
return getPackageName(getCanonicalName(name));
}
/**
* Gets the package name of a {@link Class}.
*
* @param cls the | is |
java | processing__processing4 | core/src/processing/core/PApplet.java | {
"start": 336199,
"end": 336557
} | class ____ load (with package if any)
*/
static public void main(final String mainClass) {
main(mainClass, null);
}
/**
* Convenience method so that PApplet.main("YourSketch", args) launches a
* sketch, rather than having to wrap it into a String array, and appending
* the 'args' array when not null.
* @param mainClass name of the | to |
java | apache__avro | lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectNestedObjectArrayTest.java | {
"start": 1696,
"end": 2476
} | class ____ {
private static final int ARRAY_SIZE = 10;
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void encode(final TestStateEncode state) throws Exception {
for (final ObjectArrayWrapper r : state.testData) {
state.datumWriter.write(r, state.encoder);
}
}
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
final Decoder d = state.decoder;
final ReflectDatumReader<BasicRecord[]> datumReader = new ReflectDatumReader<>(state.schema);
for (int i = 0; i < state.getBatchSize(); i++) {
blackhole.consume(datumReader.read(null, d));
}
}
@State(Scope.Thread)
public static | ReflectNestedObjectArrayTest |
java | apache__camel | components/camel-azure/camel-azure-storage-datalake/src/main/java/org/apache/camel/component/azure/storage/datalake/operations/DataLakeFileOperations.java | {
"start": 2826,
"end": 15499
} | class ____ {
private final DataLakeFileClientWrapper client;
private final DataLakeConfigurationOptionsProxy configurationProxy;
public DataLakeFileOperations(final DataLakeConfiguration configuration, final DataLakeFileClientWrapper client) {
this.client = client;
configurationProxy = new DataLakeConfigurationOptionsProxy(configuration);
}
public DataLakeOperationResponse getFile(final Exchange exchange) throws IOException {
final Message message = DataLakeUtils.getInMessage(exchange);
final OutputStream outputStream;
if (ObjectHelper.isEmpty(message)) {
outputStream = null;
} else {
outputStream = message.getBody(OutputStream.class);
}
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
if (outputStream == null) {
InputStream fileInputStream = fileClientWrapper.openInputStream();
return new DataLakeOperationResponse(fileInputStream);
}
final FileRange fileRange = configurationProxy.getFileRange(exchange);
final FileCommonRequestOptions fileCommonRequestOptions = getCommonRequestOptions(exchange);
final DownloadRetryOptions downloadRetryOptions = getDownloadRetryOptions(configurationProxy);
try {
final FileReadResponse readResponse
= fileClientWrapper.downloadWithResponse(outputStream, fileRange, downloadRetryOptions,
fileCommonRequestOptions.getRequestConditions(), fileCommonRequestOptions.getContentMD5() != null,
fileCommonRequestOptions.getTimeout());
final DataLakeExchangeHeaders dataLakeExchangeHeaders = DataLakeExchangeHeaders
.createDataLakeExchangeHeadersFromFileReadHeaders(readResponse.getDeserializedHeaders())
.httpHeaders(readResponse.getHeaders());
return new DataLakeOperationResponse(outputStream, dataLakeExchangeHeaders.toMap());
} finally {
if (Boolean.TRUE.equals(configurationProxy.getConfiguration().getCloseStreamAfterRead())) {
outputStream.close();
}
}
}
public DataLakeOperationResponse downloadToFile(final Exchange exchange) {
final String fileDir = configurationProxy.getFileDir(exchange);
if (ObjectHelper.isEmpty(fileDir)) {
throw new IllegalArgumentException("to download a file, you need to specify the fileDir in the URI");
}
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
final File recieverFile = new File(fileDir, fileClientWrapper.getFileName());
final FileCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange);
final FileRange fileRange = configurationProxy.getFileRange(exchange);
final ParallelTransferOptions parallelTransferOptions = configurationProxy.getParallelTransferOptions(exchange);
final DownloadRetryOptions downloadRetryOptions = getDownloadRetryOptions(configurationProxy);
final Set<OpenOption> openOptions = configurationProxy.getOpenOptions(exchange);
final Response<PathProperties> response
= fileClientWrapper.downloadToFileWithResponse(recieverFile.toString(), fileRange,
parallelTransferOptions, downloadRetryOptions, commonRequestOptions.getRequestConditions(),
commonRequestOptions.getContentMD5() != null, openOptions, commonRequestOptions.getTimeout());
final DataLakeExchangeHeaders exchangeHeaders
= DataLakeExchangeHeaders.createDataLakeExchangeHeadersFromPathProperties(response.getValue())
.httpHeaders(response.getHeaders())
.fileName(recieverFile.toString());
return new DataLakeOperationResponse(recieverFile, exchangeHeaders.toMap());
}
public DataLakeOperationResponse downloadLink(final Exchange exchange) {
final OffsetDateTime offsetDateTime = OffsetDateTime.now();
final PathSasPermission sasPermission = new PathSasPermission().setReadPermission(true);
final Long expirationMillis = configurationProxy.getDownloadLinkExpiration(exchange);
OffsetDateTime offsetDateTimeToSet;
if (expirationMillis != null) {
offsetDateTimeToSet = offsetDateTime.plusSeconds(expirationMillis / 1000);
} else {
final long defaultExpirationTime = 60L * 60L;
offsetDateTimeToSet = offsetDateTime.plusSeconds(defaultExpirationTime);
}
final DataLakeServiceSasSignatureValues serviceSasSignatureValues
= new DataLakeServiceSasSignatureValues(offsetDateTimeToSet, sasPermission);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
final String url = fileClientWrapper.getFileUrl() + "?" + fileClientWrapper.generateSas(serviceSasSignatureValues);
final DataLakeExchangeHeaders headers = DataLakeExchangeHeaders.create().downloadLink(url);
return new DataLakeOperationResponse(url, headers.toMap());
}
public DataLakeOperationResponse deleteFile(final Exchange exchange) {
final FileCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
Response<Void> response
= fileClientWrapper.delete(commonRequestOptions.getRequestConditions(), commonRequestOptions.getTimeout());
DataLakeExchangeHeaders exchangeHeaders = DataLakeExchangeHeaders.create();
exchangeHeaders.httpHeaders(response.getHeaders());
return new DataLakeOperationResponse(true, exchangeHeaders.toMap());
}
public DataLakeOperationResponse appendToFile(final Exchange exchange) throws IOException {
final FileCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange);
final FileStreamAndLength fileStreamAndLength = FileStreamAndLength.createFileStreamAndLengthFromExchangeBody(exchange);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
final Long fileOffset;
if (configurationProxy.getFileOffset(exchange) == null) {
fileOffset = fileClientWrapper.getFileSize();
} else {
fileOffset = configurationProxy.getFileOffset(exchange);
}
final DataLakeFileAppendOptions options = new DataLakeFileAppendOptions();
options.setContentHash(commonRequestOptions.getContentMD5());
options.setLeaseId(commonRequestOptions.getLeaseId());
options.setFlush(configurationProxy.getFlush(exchange));
final Response<Void> response = fileClientWrapper.appendWithResponse(fileStreamAndLength.getInputStream(), fileOffset,
fileStreamAndLength.getStreamLength(), commonRequestOptions.getTimeout(), options);
DataLakeExchangeHeaders exchangeHeaders = DataLakeExchangeHeaders.create();
exchangeHeaders.httpHeaders(response.getHeaders());
return new DataLakeOperationResponse(true, exchangeHeaders.toMap());
}
public DataLakeOperationResponse flushToFile(final Exchange exchange) {
final FileCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange);
final Long position = configurationProxy.getPosition(exchange);
final Boolean retainUncommitedData = configurationProxy.retainUnCommitedData(exchange);
final Boolean close = configurationProxy.getClose(exchange);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
final Response<PathInfo> response
= fileClientWrapper.flushWithResponse(position + fileClientWrapper.getFileSize(), retainUncommitedData, close,
commonRequestOptions.getPathHttpHeaders(), commonRequestOptions.getRequestConditions(),
commonRequestOptions.getTimeout());
DataLakeExchangeHeaders exchangeHeaders
= DataLakeExchangeHeaders.createDataLakeExchangeHeadersFromPathInfo(response.getValue())
.httpHeaders(response.getHeaders());
return new DataLakeOperationResponse(response.getValue(), exchangeHeaders.toMap());
}
public DataLakeOperationResponse uploadFromFile(final Exchange exchange) {
final String path = configurationProxy.getPath(exchange);
final ParallelTransferOptions transferOptions = configurationProxy.getParallelTransferOptions(exchange);
final FileCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
fileClientWrapper.uploadFromFile(path, transferOptions, commonRequestOptions.getPathHttpHeaders(),
commonRequestOptions.getMetadata(), commonRequestOptions.getRequestConditions(),
commonRequestOptions.getTimeout());
return new DataLakeOperationResponse(true);
}
public DataLakeOperationResponse upload(final Exchange exchange) throws Exception {
final FileCommonRequestOptions commonRequestOptions = getCommonRequestOptions(exchange);
final ParallelTransferOptions transferOptions = configurationProxy.getParallelTransferOptions(exchange);
final InputStream is = exchange.getMessage().getMandatoryBody(InputStream.class);
final String permission = configurationProxy.getPermission(exchange);
final String umask = configurationProxy.getUmask(exchange);
final FileParallelUploadOptions uploadOptions
= new FileParallelUploadOptions(is)
.setHeaders(commonRequestOptions.getPathHttpHeaders()).setParallelTransferOptions(transferOptions)
.setMetadata(commonRequestOptions.getMetadata()).setPermissions(permission)
.setRequestConditions(commonRequestOptions.getRequestConditions())
.setRequestConditions(commonRequestOptions.getRequestConditions()).setUmask(umask);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
final Response<PathInfo> response
= fileClientWrapper.uploadWithResponse(uploadOptions, commonRequestOptions.getTimeout());
DataLakeExchangeHeaders exchangeHeaders
= DataLakeExchangeHeaders.createDataLakeExchangeHeadersFromPathInfo(response.getValue())
.httpHeaders(response.getHeaders());
return new DataLakeOperationResponse(true, exchangeHeaders.toMap());
}
public DataLakeOperationResponse openQueryInputStream(final Exchange exchange) {
FileQueryOptions queryOptions = configurationProxy.getFileQueryOptions(exchange);
final DataLakeFileClientWrapper fileClientWrapper = getFileClientWrapper(exchange);
final Response<InputStream> response = fileClientWrapper.openQueryInputStreamWithResponse(queryOptions);
DataLakeExchangeHeaders exchangeHeaders = DataLakeExchangeHeaders.create();
exchangeHeaders.httpHeaders(response.getHeaders());
return new DataLakeOperationResponse(response.getValue(), exchangeHeaders.toMap());
}
private FileCommonRequestOptions getCommonRequestOptions(final Exchange exchange) {
final PathHttpHeaders httpHeaders = configurationProxy.getPathHttpHeaders(exchange);
final Map<String, String> metadata = configurationProxy.getMetadata(exchange);
final AccessTier accessTier = configurationProxy.getAccessTier(exchange);
final DataLakeRequestConditions dataLakeRequestConditions = configurationProxy.getDataLakeRequestConditions(exchange);
final Duration timeout = configurationProxy.getTimeout(exchange);
final byte[] contentMD5 = configurationProxy.getContentMd5(exchange);
return new FileCommonRequestOptions(httpHeaders, metadata, accessTier, dataLakeRequestConditions, contentMD5, timeout);
}
private DownloadRetryOptions getDownloadRetryOptions(final DataLakeConfigurationOptionsProxy proxy) {
return new DownloadRetryOptions().setMaxRetryRequests(proxy.getMaxRetryRequests());
}
private DataLakeFileClientWrapper getFileClientWrapper(final Exchange exchange) {
final DataLakeFileClient fileClient = configurationProxy.getFileClient(exchange);
return null == fileClient ? client : new DataLakeFileClientWrapper(fileClient);
}
}
| DataLakeFileOperations |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ParameterNameTest.java | {
"start": 13110,
"end": 13397
} | class ____ purpose
{
System.err.println(InnerClassTest.this);
}
}
}
@Test
public void innerClassNegative() {
testHelper
.addSourceLines(
"Test.java",
"import " + InnerClassTest.class.getCanonicalName() + ";",
" | on |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/entrypoint/ClusterEntrypointUtils.java | {
"start": 2091,
"end": 2602
} | class ____ not be instantiated.");
}
/**
* Parses passed String array using the parameter definitions of the passed {@code
* ParserResultFactory}. The method will call {@code System.exit} and print the usage
* information to stdout in case of a parsing error.
*
* @param args The String array that shall be parsed.
* @param parserResultFactory The {@code ParserResultFactory} that collects the parameter
* parsing instructions.
* @param mainClass The main | should |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerArgumentToStringTest.java | {
"start": 4576,
"end": 4961
} | class ____ {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
public void f() {
logger.atInfo().log("hello '%s'", toString());
}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.common.flogger.FluentLogger;
| Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/jpa/PersistenceUnitOverridesTests.java | {
"start": 19505,
"end": 19678
} | class ____ extends Dialect {
@Override
public DatabaseVersion getVersion() {
return SimpleDatabaseVersion.ZERO_VERSION;
}
}
@Entity
public static | IntegrationDialect |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FilerProducerDoneFileNameRouteTest.java | {
"start": 1245,
"end": 2587
} | class ____ extends ContextTestSupport {
private static final String TEST_FILE_NAME = "hello" + UUID.randomUUID() + ".txt";
private final Properties myProp = new Properties();
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myProp", myProp);
return jndi;
}
@Test
public void testProducerPlaceholderPrefixDoneFileName() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBodyAndHeader("direct:start", "Hello World", Exchange.FILE_NAME, TEST_FILE_NAME);
assertMockEndpointsSatisfied();
assertTrue(oneExchangeDone.matches(5, TimeUnit.SECONDS));
assertFileExists(testFile(TEST_FILE_NAME));
assertFileExists(testFile("done-" + TEST_FILE_NAME));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
myProp.put("myDir", testDirectory().toString());
context.getPropertiesComponent().setLocation("ref:myProp");
from("direct:start").to("file:{{myDir}}?doneFileName=done-${file:name}").to("mock:result");
}
};
}
}
| FilerProducerDoneFileNameRouteTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsIntsEvaluator.java | {
"start": 1195,
"end": 4953
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsIntsEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator lhs;
private final EvalOperator.ExpressionEvaluator rhs;
private final DriverContext driverContext;
private Warnings warnings;
public EqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
this.source = source;
this.lhs = lhs;
this.rhs = rhs;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (IntBlock lhsBlock = (IntBlock) lhs.eval(page)) {
try (IntBlock rhsBlock = (IntBlock) rhs.eval(page)) {
IntVector lhsVector = lhsBlock.asVector();
if (lhsVector == null) {
return eval(page.getPositionCount(), lhsBlock, rhsBlock);
}
IntVector rhsVector = rhsBlock.asVector();
if (rhsVector == null) {
return eval(page.getPositionCount(), lhsBlock, rhsBlock);
}
return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock();
}
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += lhs.baseRamBytesUsed();
baseRamBytesUsed += rhs.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) {
try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (lhsBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
switch (rhsBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
int lhs = lhsBlock.getInt(lhsBlock.getFirstValueIndex(p));
int rhs = rhsBlock.getInt(rhsBlock.getFirstValueIndex(p));
result.appendBoolean(Equals.processInts(lhs, rhs));
}
return result.build();
}
}
public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) {
try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
int lhs = lhsVector.getInt(p);
int rhs = rhsVector.getInt(p);
result.appendBoolean(p, Equals.processInts(lhs, rhs));
}
return result.build();
}
}
@Override
public String toString() {
return "EqualsIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(lhs, rhs);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | EqualsIntsEvaluator |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/authenticator/SaslClientAuthenticator.java | {
"start": 31697,
"end": 35683
} | class ____ {
public ApiVersionsResponse apiVersionsResponseFromOriginalAuthentication;
public long reauthenticationBeginNanos;
public List<NetworkReceive> pendingAuthenticatedReceives = new ArrayList<>();
public ApiVersionsResponse apiVersionsResponseReceivedFromBroker;
public Long positiveSessionLifetimeMs;
public long authenticationEndNanos;
public Long clientSessionReauthenticationTimeNanos;
public void reauthenticating(ApiVersionsResponse apiVersionsResponseFromOriginalAuthentication,
long reauthenticationBeginNanos) {
this.apiVersionsResponseFromOriginalAuthentication = Objects
.requireNonNull(apiVersionsResponseFromOriginalAuthentication);
this.reauthenticationBeginNanos = reauthenticationBeginNanos;
}
public boolean reauthenticating() {
return apiVersionsResponseFromOriginalAuthentication != null;
}
public ApiVersionsResponse apiVersionsResponse() {
return reauthenticating() ? apiVersionsResponseFromOriginalAuthentication
: apiVersionsResponseReceivedFromBroker;
}
/**
* Return the (always non-null but possibly empty) NetworkReceive response that
* arrived during re-authentication that is unrelated to re-authentication, if
* any. This corresponds to a request sent prior to the beginning of
* re-authentication; the request was made when the channel was successfully
* authenticated, and the response arrived during the re-authentication
* process.
*
* @return the (always non-null but possibly empty) NetworkReceive response
* that arrived during re-authentication that is unrelated to
* re-authentication, if any
*/
public Optional<NetworkReceive> pollResponseReceivedDuringReauthentication() {
if (pendingAuthenticatedReceives.isEmpty())
return Optional.empty();
return Optional.of(pendingAuthenticatedReceives.remove(0));
}
public void setAuthenticationEndAndSessionReauthenticationTimes(long nowNanos) {
authenticationEndNanos = nowNanos;
long sessionLifetimeMsToUse;
if (positiveSessionLifetimeMs != null) {
// pick a random percentage between 85% and 95% for session re-authentication
double pctWindowFactorToTakeNetworkLatencyAndClockDriftIntoAccount = 0.85;
double pctWindowJitterToAvoidReauthenticationStormAcrossManyChannelsSimultaneously = 0.10;
double pctToUse = pctWindowFactorToTakeNetworkLatencyAndClockDriftIntoAccount + RNG.nextDouble()
* pctWindowJitterToAvoidReauthenticationStormAcrossManyChannelsSimultaneously;
sessionLifetimeMsToUse = (long) (positiveSessionLifetimeMs * pctToUse);
clientSessionReauthenticationTimeNanos = Math.addExact(authenticationEndNanos, Utils.msToNs(sessionLifetimeMsToUse));
log.debug(
"Finished {} with session expiration in {} ms and session re-authentication on or after {} ms",
authenticationOrReauthenticationText(), positiveSessionLifetimeMs, sessionLifetimeMsToUse);
} else
log.debug("Finished {} with no session expiration and no session re-authentication",
authenticationOrReauthenticationText());
}
public Long reauthenticationLatencyMs() {
return reauthenticating()
? Math.round((authenticationEndNanos - reauthenticationBeginNanos) / 1000.0 / 1000.0)
: null;
}
private String authenticationOrReauthenticationText() {
return reauthenticating() ? "re-authentication" : "authentication";
}
}
}
| ReauthInfo |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/files/TestUploadEtag.java | {
"start": 1001,
"end": 6428
} | class ____ {
@Test
public void testFromCompletedPartCRC32() {
final CompletedPart completedPart = CompletedPart.builder()
.eTag("tag")
.checksumCRC32("checksum")
.build();
final UploadEtag uploadEtag = UploadEtag.fromCompletedPart(completedPart);
Assertions.assertThat(uploadEtag.getEtag())
.describedAs("Etag mismatch")
.isEqualTo("tag");
Assertions.assertThat(uploadEtag.getChecksumAlgorithm())
.describedAs("Checksum algorithm should be CRC32")
.isEqualTo("CRC32");
Assertions.assertThat(uploadEtag.getChecksum())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testFromCompletedPartCRC32C() {
final CompletedPart completedPart = CompletedPart.builder()
.eTag("tag")
.checksumCRC32C("checksum")
.build();
final UploadEtag uploadEtag = UploadEtag.fromCompletedPart(completedPart);
Assertions.assertThat(uploadEtag.getEtag())
.describedAs("Etag mismatch")
.isEqualTo("tag");
Assertions.assertThat(uploadEtag.getChecksumAlgorithm())
.describedAs("Checksum algorithm should be CRC32C")
.isEqualTo("CRC32C");
Assertions.assertThat(uploadEtag.getChecksum())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testFromCompletedPartSHA1() {
final CompletedPart completedPart = CompletedPart.builder()
.eTag("tag")
.checksumSHA1("checksum")
.build();
final UploadEtag uploadEtag = UploadEtag.fromCompletedPart(completedPart);
Assertions.assertThat(uploadEtag.getEtag())
.describedAs("Etag mismatch")
.isEqualTo("tag");
Assertions.assertThat(uploadEtag.getChecksumAlgorithm())
.describedAs("Checksum algorithm should be SHA1")
.isEqualTo("SHA1");
Assertions.assertThat(uploadEtag.getChecksum())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testFromCompletedPartSHA256() {
final CompletedPart completedPart = CompletedPart.builder()
.eTag("tag")
.checksumSHA256("checksum")
.build();
final UploadEtag uploadEtag = UploadEtag.fromCompletedPart(completedPart);
Assertions.assertThat(uploadEtag.getEtag())
.describedAs("Etag mismatch")
.isEqualTo("tag");
Assertions.assertThat(uploadEtag.getChecksumAlgorithm())
.describedAs("Checksum algorithm should be SHA256")
.isEqualTo("SHA256");
Assertions.assertThat(uploadEtag.getChecksum())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testFromCompletedPartNoChecksum() {
final CompletedPart completedPart = CompletedPart.builder()
.eTag("tag")
.build();
final UploadEtag uploadEtag = UploadEtag.fromCompletedPart(completedPart);
Assertions.assertThat(uploadEtag.getEtag())
.describedAs("Etag mismatch")
.isEqualTo("tag");
Assertions.assertThat(uploadEtag.getChecksumAlgorithm())
.describedAs("uploadEtag.getChecksumAlgorithm()")
.isNull();
Assertions.assertThat(uploadEtag.getChecksum())
.describedAs("uploadEtag.getChecksum()")
.isNull();
}
@Test
public void testToCompletedPartCRC32() {
final UploadEtag uploadEtag = new UploadEtag("tag", "CRC32", "checksum");
final CompletedPart completedPart = UploadEtag.toCompletedPart(uploadEtag, 1);
Assertions.assertThat(completedPart.checksumCRC32())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testToCompletedPartCRC32C() {
final UploadEtag uploadEtag = new UploadEtag("tag", "CRC32C", "checksum");
final CompletedPart completedPart = UploadEtag.toCompletedPart(uploadEtag, 1);
Assertions.assertThat(completedPart.checksumCRC32C())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testToCompletedPartSHA1() {
final UploadEtag uploadEtag = new UploadEtag("tag", "SHA1", "checksum");
final CompletedPart completedPart = UploadEtag.toCompletedPart(uploadEtag, 1);
Assertions.assertThat(completedPart.checksumSHA1())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testToCompletedPartSHA256() {
final UploadEtag uploadEtag = new UploadEtag("tag", "SHA256", "checksum");
final CompletedPart completedPart = UploadEtag.toCompletedPart(uploadEtag, 1);
Assertions.assertThat(completedPart.checksumSHA256())
.describedAs("Checksum mismatch")
.isEqualTo("checksum");
}
@Test
public void testToCompletedPartNoChecksum() {
final UploadEtag uploadEtag = new UploadEtag("tag", null, null);
final CompletedPart completedPart = UploadEtag.toCompletedPart(uploadEtag, 1);
Assertions.assertThat(completedPart.checksumCRC32())
.describedAs("completedPart.checksumCRC32()")
.isNull();
Assertions.assertThat(completedPart.checksumCRC32C())
.describedAs("completedPart.checksumCRC32C()")
.isNull();
Assertions.assertThat(completedPart.checksumSHA1())
.describedAs("completedPart.checksumSHA1()")
.isNull();
Assertions.assertThat(completedPart.checksumSHA256())
.describedAs("completedPart.checksumSHA256()")
.isNull();
}
}
| TestUploadEtag |
java | greenrobot__greendao | tests/DaoTest/src/androidTest/java/org/greenrobot/greendao/daotest/query/QueryLimitOffsetTest.java | {
"start": 1156,
"end": 4884
} | class ____ extends TestEntityTestBase {
@Override
protected void setUp() throws Exception {
super.setUp();
QueryBuilder.LOG_SQL = true;
QueryBuilder.LOG_VALUES = true;
}
public void testQueryBuilderLimit() {
insert(10);
List<TestEntity> result = dao.queryBuilder().limit(3).orderAsc(Properties.SimpleInt).list();
assertEquals(3, result.size());
assertEquals(getSimpleInteger(0), result.get(0).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(1), result.get(1).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(2), result.get(2).getSimpleInteger().intValue());
}
public void testQueryBuilderOffsetAndLimit() {
insert(10);
List<TestEntity> result = dao.queryBuilder().offset(3).limit(3).orderAsc(Properties.SimpleInt).list();
assertEquals(3, result.size());
assertEquals(getSimpleInteger(3), result.get(0).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(4), result.get(1).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(5), result.get(2).getSimpleInteger().intValue());
}
public void testQueryBuilderOffsetAndLimitWithWhere() {
insert(10);
List<TestEntity> result = dao.queryBuilder().where(Properties.SimpleInteger.gt(getSimpleInteger(1))).offset(2)
.limit(3).orderAsc(Properties.SimpleInt).list();
assertEquals(3, result.size());
assertEquals(getSimpleInteger(4), result.get(0).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(5), result.get(1).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(6), result.get(2).getSimpleInteger().intValue());
}
public void testQueryOffsetAndLimit() {
insert(10);
Query<TestEntity> query = dao.queryBuilder().where(Properties.SimpleInteger.gt(getSimpleInteger(-1))).offset(-1)
.limit(-1).orderAsc(Properties.SimpleInt).build();
query.setParameter(0, getSimpleInteger(1));
query.setLimit(3);
query.setOffset(2);
List<TestEntity> result = query.list();
assertEquals(3, result.size());
assertEquals(getSimpleInteger(4), result.get(0).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(5), result.get(1).getSimpleInteger().intValue());
assertEquals(getSimpleInteger(6), result.get(2).getSimpleInteger().intValue());
}
public void testQueryBuilderOffsetWithoutLimit() {
try{
dao.queryBuilder().offset(7).orderAsc(Properties.SimpleInt).build();
fail("Offset may not be set alone");
} catch(RuntimeException expected) {
//OK
}
}
public void testQueryLimitAndSetParameter() {
Query<TestEntity> query = dao.queryBuilder().limit(5).offset(1).build();
try{
query.setParameter(0, (Object) null);
fail("Offset/limit parameters must not interfere with user parameters");
} catch(RuntimeException expected) {
//OK
}
}
public void testQueryUnsetLimit() {
Query<TestEntity> query = dao.queryBuilder().build();
try{
query.setLimit(1);
fail("Limit must be defined in builder first");
} catch(RuntimeException expected) {
//OK
}
}
public void testQueryUnsetOffset() {
Query<TestEntity> query = dao.queryBuilder().limit(1).build();
try{
query.setOffset(1);
fail("Offset must be defined in builder first");
} catch(RuntimeException expected) {
//OK
}
}
}
| QueryLimitOffsetTest |
java | dropwizard__dropwizard | dropwizard-core/src/test/java/io/dropwizard/core/setup/BootstrapTest.java | {
"start": 798,
"end": 4623
} | class ____ {
private final Application<Configuration> application = new Application<Configuration>() {
@Override
public void run(Configuration configuration, Environment environment) throws Exception {
}
};
private Bootstrap<Configuration> bootstrap;
@BeforeEach
void setUp() {
bootstrap = new Bootstrap<>(application);
}
@Test
void hasAnApplication() throws Exception {
assertThat(bootstrap.getApplication())
.isEqualTo(application);
}
@Test
void hasAnObjectMapper() throws Exception {
assertThat(bootstrap.getObjectMapper())
.isNotNull();
}
@Test
void hasHealthCheckRegistry() {
assertThat(bootstrap.getHealthCheckRegistry())
.isNotNull();
}
@Test
void defaultsToUsingFilesForConfiguration() throws Exception {
assertThat(bootstrap.getConfigurationSourceProvider())
.isInstanceOfAny(FileConfigurationSourceProvider.class);
}
@Test
void defaultsToUsingTheDefaultClassLoader() throws Exception {
assertThat(bootstrap.getClassLoader())
.isEqualTo(Thread.currentThread().getContextClassLoader());
}
@Test
void comesWithJvmInstrumentation() throws Exception {
bootstrap.registerMetrics();
assertThat(bootstrap.getMetricRegistry().getNames())
.contains("jvm.buffers.mapped.capacity", "jvm.threads.count", "jvm.memory.heap.usage",
"jvm.attribute.vendor", "jvm.classloader.loaded", "jvm.filedescriptor");
}
@Test
void defaultsToDefaultConfigurationFactoryFactory() throws Exception {
assertThat(bootstrap.getConfigurationFactoryFactory())
.isInstanceOf(DefaultConfigurationFactoryFactory.class);
}
@Test
void bringsYourOwnMetricRegistry() {
final MetricRegistry newRegistry = new MetricRegistry() {
@Override
public Histogram histogram(String name) {
Histogram existed = (Histogram) getMetrics().get(name);
return existed != null ? existed : new Histogram(new UniformReservoir());
}
};
bootstrap.setMetricRegistry(newRegistry);
bootstrap.registerMetrics();
assertThat(newRegistry.getNames())
.contains("jvm.buffers.mapped.capacity", "jvm.threads.count", "jvm.memory.heap.usage",
"jvm.attribute.vendor", "jvm.classloader.loaded", "jvm.filedescriptor");
}
@Test
void allowsAccessToJmxReporter() {
final MetricRegistry newRegistry = new MetricRegistry();
bootstrap.setMetricRegistry(newRegistry);
assertThat(bootstrap.getJmxReporter()).isNull();
bootstrap.registerMetrics();
assertThat(bootstrap.getJmxReporter()).isNotNull();
}
@Test
void canUseCustomValidatorFactory() throws Exception {
ValidatorFactory factory = Validation
.byProvider(HibernateValidator.class)
.configure()
.buildValidatorFactory();
bootstrap.setValidatorFactory(factory);
assertThat(bootstrap.getValidatorFactory()).isSameAs(factory);
}
@Test
void canUseCustomObjectMapper() {
final ObjectMapper minimalObjectMapper = Jackson.newMinimalObjectMapper();
bootstrap.setObjectMapper(minimalObjectMapper);
assertThat(bootstrap.getObjectMapper()).isSameAs(minimalObjectMapper);
}
@Test
void canUseCustomHealthCheckRegistry() {
final HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();
bootstrap.setHealthCheckRegistry(healthCheckRegistry);
assertThat(bootstrap.getHealthCheckRegistry()).isSameAs(healthCheckRegistry);
}
}
| BootstrapTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/sqlserver/SQLServerWallTest_7.java | {
"start": 901,
"end": 4109
} | class ____ extends TestCase {
public void test_true() throws Exception {
WallProvider provider = new SQLServerWallProvider();
provider.getConfig().setSelectHavingAlwayTrueCheck(true);
assertTrue(provider.checkValid(//
"select * from (select top 3000 jol.id as primaryKeyIds, jo.orderNo as 委托单号, bm.custname as 委托方名称, manuCustInfo.custname as 出证方, jo.chkerDate as 委托日期, bcc.connectName as 联系人, bcc.mobile as 电话,jol.barcode as 样品条码, jol.name as 样品名称, jol.qtysub as 套件数量,jol.OnlyCode as 唯一识别码,jol.manufacture as 制造厂商,jol.sprc as 规格型号, jol.outno as 出厂编号, djod.name as 任务状态, djoc.name 委托类别, dct.name as 证书类型,bdp.organ_name as 分配室组, jol.task_id as 任务ID, be.user_name as 检定人员, verifyUser.user_name as 核验员, checkUser.user_name as 批准员, jol.ddate 交款日期,cjcs.name as 缴款状态, jc.moneys as 应收费, jc.moneystotal as 实收费, be.organ_id from jl_OrderList jol left outer join jl_CheckList jc on jol.checkList_id = jc.id left outer join jl_Order jo on jol.orderid = jo.id left outer join ba_dptinfo bdp on jol.dptid = bdp.id left outer join jl_Task jt on jol.task_id = jt.id left outer join ba_manucustinfo bm on jo.entrustCustId = bm.id left outer join ba_manucustinfo manuCustInfo on jo.custid = manuCustInfo.id left outer join ba_custconnect bcc on jo.linkPerson = bcc.id left outer join ba_employeeinfo be on jt.jdUserId = be.id left outer join ba_employeeinfo verifyUser on jt.verifyUser = verifyUser.id left outer join ba_employeeinfo checkUser on jt.checkUser = checkUser.id left outer join dict_jl_order_doType djod on jol.dotypeid = djod.id left outer join dict_jl_order_class djoc on jo.orderclassid = djoc.id left outer join dict_certificate_type dct on jt.certificatetype = dct.id left outer join dict_jl_chargestatus cjcs on jol.chargestatus = cjcs.id where jol.id >= (select min(primaryKeyId) from (select top 3000 jol.id as primaryKeyId from jl_OrderList jol left outer join jl_CheckList jc on jol.checkList_id = jc.id left outer join jl_Order jo on jol.orderid = jo.id left outer join ba_dptinfo bdp on jol.dptid = bdp.id left outer join jl_Task jt on jol.task_id = jt.id left outer join ba_manucustinfo bm on jo.entrustCustId = bm.id left outer join ba_manucustinfo manuCustInfo on jo.custid = manuCustInfo.id left outer join 
ba_custconnect bcc on jo.linkPerson = bcc.id left outer join ba_employeeinfo be on jt.jdUserId = be.id left outer join ba_employeeinfo verifyUser on jt.verifyUser = verifyUser.id left outer join ba_employeeinfo checkUser on jt.checkUser = checkUser.id left outer join dict_jl_order_doType djod on jol.dotypeid = djod.id left outer join dict_jl_order_class djoc on jo.orderclassid = djoc.id left outer join dict_certificate_type dct on jt.certificatetype = dct.id left outer join dict_jl_chargestatus cjcs on jol.chargestatus = cjcs.id where jo.chkerDate >= ? and jo.chkerDate <= ? and jol.ddate >= ? order by jol.id desc, jo.id DESC) as T) and jo.chkerDate >= ? and jo.chkerDate <= ? and jol.ddate >= ? order by jol.id asc, jo.id DESC) as T1 order by T1.primaryKeyIds desc"));
// assertEquals(12, provider.getTableStats().size());
// assertTrue(provider.getTableStats().containsKey("jl_OrderList"));
}
}
| SQLServerWallTest_7 |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/channel/EventLoopGroupFactory.java | {
"start": 12727,
"end": 13222
} | class ____.
*
* @param configuration The configuration
* @return A SocketChannel instance.
* @deprecated Use {@link #channelInstance(NettyChannelType, EventLoopGroupConfiguration)} instead
*/
@Deprecated(since = "4.5.0", forRemoval = true)
default @NonNull SocketChannel clientSocketChannelInstance(@Nullable EventLoopGroupConfiguration configuration) {
return (SocketChannel) channelInstance(NettyChannelType.CLIENT_SOCKET, configuration);
}
}
| instance |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/OracleServerConfiguration.java | {
"start": 836,
"end": 1003
} | class ____ extract some initial configuration from the database for {@link OracleDialect}.
*
* @author Marco Belladelli
* @author Loïc Lefèvre
*/
@Internal
public | that |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/collections/LinkedIdentityHashMap.java | {
"start": 4908,
"end": 5505
} | class ____ implements Iterator<Map.Entry<K, V>> {
private Node<K, V> next = head;
private Node<K, V> current = null;
@Override
public boolean hasNext() {
return next != null;
}
@Override
public Node<K, V> next() {
Node<K, V> e = next;
if ( e == null ) {
throw new NoSuchElementException();
}
current = e;
next = e.after;
return e;
}
@Override
public void remove() {
Node<K, V> e = current;
if ( e == null ) {
throw new IllegalStateException();
}
LinkedIdentityHashMap.this.remove( e.key );
current = null;
}
}
final | EntryIterator |
java | reactor__reactor-core | reactor-core/src/blockHoundTest/java/reactor/core/scheduler/ReactorTestBlockHoundIntegration.java | {
"start": 825,
"end": 1066
} | class ____ implements BlockHoundIntegration {
@Override
public void applyTo(BlockHound.Builder builder) {
builder.allowBlockingCallsInside(RaceTestUtils.class.getName(), "lambda$race$2");
}
}
| ReactorTestBlockHoundIntegration |
java | apache__camel | components/camel-ai/camel-qdrant/src/main/java/org/apache/camel/component/qdrant/transform/QdrantEmbeddingsDataTypeTransformer.java | {
"start": 1765,
"end": 2783
} | class ____ extends Transformer {
@Override
public void transform(Message message, DataType fromType, DataType toType) {
Embedding embedding = message.getHeader(CamelLangchain4jAttributes.CAMEL_LANGCHAIN4J_EMBEDDING_VECTOR, Embedding.class);
TextSegment text = message.getBody(TextSegment.class);
Common.PointId id
= message.getHeader(Qdrant.Headers.POINT_ID, () -> PointIdFactory.id(UUID.randomUUID()), Common.PointId.class);
var builder = Points.PointStruct.newBuilder();
builder.setId(id);
builder.setVectors(VectorsFactory.vectors(embedding.vector()));
if (text != null) {
builder.putPayload("text_segment", ValueFactory.value(text.text()));
Metadata metadata = text.metadata();
metadata.toMap()
.forEach((key, value) -> builder.putPayload(key, ValueFactory.value((String) value)));
}
message.setBody(builder.build());
}
}
| QdrantEmbeddingsDataTypeTransformer |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoLog.java | {
"start": 1190,
"end": 1945
} | class ____<T> extends InternalMonoOperator<T, T> {
final SignalPeek<T> log;
MonoLog(Mono<? extends T> source, SignalPeek<T> log) {
super(source);
this.log = log;
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
if (actual instanceof ConditionalSubscriber) {
@SuppressWarnings("unchecked") // javac, give reason to suppress because inference anomalies
ConditionalSubscriber<T> s2 = (ConditionalSubscriber<T>) actual;
return new PeekConditionalSubscriber<>(s2, log);
}
return new FluxPeek.PeekSubscriber<>(actual, log);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
} | MonoLog |
java | elastic__elasticsearch | build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java | {
"start": 10225,
"end": 11012
} | class ____ {
public static final TransportVersion V_1_0_0 = def(1_000_0_00);
public static final TransportVersion V_1_1_0 = def(1_001_0_00);
public static final TransportVersion V_1_2_0 = def(1_002_0_00);
public static final TransportVersion V_1_2_1 = def(1_002_0_01);
public static final TransportVersion V_1_2_2 = def(1_002_0_02);
public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
}
""";
var expectedTransportVersions = """
public | TransportVersions |
java | google__guice | core/test/com/google/inject/example/JndiProviderClient.java | {
"start": 943,
"end": 1494
} | class ____ {
public static void main(String[] args) throws CreationException {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
// Bind Context to the default InitialContext.
bind(Context.class).to(InitialContext.class);
// Bind to DataSource from JNDI.
bind(DataSource.class).toProvider(fromJndi(DataSource.class, "..."));
}
});
}
}
| JndiProviderClient |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/ReduceTaskTest.java | {
"start": 13050,
"end": 13922
} | class ____ extends RichGroupReduceFunction<Record, Record> {
private static final long serialVersionUID = 1L;
private int cnt = 0;
private final IntValue key = new IntValue();
private final IntValue value = new IntValue();
@Override
public void reduce(Iterable<Record> records, Collector<Record> out) {
Record element = null;
int valCnt = 0;
for (Record next : records) {
element = next;
valCnt++;
}
if (++this.cnt >= 10) {
throw new ExpectedTestException();
}
element.getField(0, this.key);
this.value.setValue(valCnt - this.key.getValue());
element.setField(1, this.value);
out.collect(element);
}
}
public static | MockFailingReduceStub |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/processor/CdiComponentProcessor.java | {
"start": 792,
"end": 2427
} | class ____ extends AnnotationBasedComponentModelProcessor {
@Override
protected String getComponentModelIdentifier() {
return MappingConstantsGem.ComponentModelGem.CDI;
}
@Override
protected List<Annotation> getTypeAnnotations(Mapper mapper) {
return Collections.singletonList(
new Annotation( getType( "ApplicationScoped" ) )
);
}
@Override
protected List<Annotation> getMapperReferenceAnnotations() {
return Arrays.asList( new Annotation( getType( "Inject" ) ) );
}
@Override
protected boolean requiresGenerationOfDecoratorClass() {
return false;
}
@Override
protected boolean additionalPublicEmptyConstructor() {
return true;
}
private Type getType(String simpleName) {
String javaxPrefix = "javax.inject.";
String jakartaPrefix = "jakarta.inject.";
if ( "ApplicationScoped".equals( simpleName ) ) {
javaxPrefix = "javax.enterprise.context.";
jakartaPrefix = "jakarta.enterprise.context.";
}
if ( getTypeFactory().isTypeAvailable( javaxPrefix + simpleName ) ) {
return getTypeFactory().getType( javaxPrefix + simpleName );
}
if ( getTypeFactory().isTypeAvailable( jakartaPrefix + simpleName ) ) {
return getTypeFactory().getType( jakartaPrefix + simpleName );
}
throw new AnnotationProcessingException(
"Couldn't find any of the CDI or Jakarta CDI Dependency types." +
" Are you missing a dependency on your classpath?" );
}
}
| CdiComponentProcessor |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CpsDoesNotUseSkipUnavailableIT.java | {
"start": 1501,
"end": 1661
} | class ____ extends AbstractMultiClustersTestCase {
private static final String LINKED_CLUSTER_1 = "cluster-a";
public static | CpsDoesNotUseSkipUnavailableIT |
java | apache__flink | flink-metrics/flink-metrics-influxdb/src/main/java/org/apache/flink/metrics/influxdb/InfluxdbReporterOptions.java | {
"start": 5775,
"end": 6049
} | enum ____ {
HTTP("http"),
HTTPS("https");
private final String scheme;
Scheme(String scheme) {
this.scheme = scheme;
}
@Override
public String toString() {
return scheme;
}
}
}
| Scheme |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/H2AggregateSupport.java | {
"start": 10771,
"end": 11583
} | class ____ extends AggregateJsonWriteExpression
implements WriteExpressionRenderer {
private final String path;
RootJsonWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
this.path = aggregateColumn.getSelectionExpression();
initializeSubExpressions( aggregateColumn, columns );
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression aggregateColumnWriteExpression,
String qualifier) {
final String basePath;
if ( qualifier == null || qualifier.isBlank() ) {
basePath = path;
}
else {
basePath = qualifier + "." + path;
}
append( sqlAppender, "(" + basePath + ")", translator, aggregateColumnWriteExpression );
}
}
private static | RootJsonWriteExpression |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/mapping/spi/JaxbEntityOrMappedSuperclass.java | {
"start": 221,
"end": 333
} | interface ____ commonality between entity and mapped-superclass mappings
*
* @author Steve Ebersole
*/
public | for |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/resource/drawable/NonOwnedDrawableResource.java | {
"start": 405,
"end": 1266
} | class ____ extends DrawableResource<Drawable> {
@SuppressWarnings("unchecked")
@Nullable
static Resource<Drawable> newInstance(@Nullable Drawable drawable) {
return drawable != null ? new NonOwnedDrawableResource(drawable) : null;
}
private NonOwnedDrawableResource(Drawable drawable) {
super(drawable);
}
@NonNull
@SuppressWarnings("unchecked")
@Override
public Class<Drawable> getResourceClass() {
return (Class<Drawable>) drawable.getClass();
}
@Override
public int getSize() {
// 4 bytes per pixel for ARGB_8888 Bitmaps is something of a reasonable approximation. If
// there are no intrinsic bounds, we can fall back just to 1.
return Math.max(1, drawable.getIntrinsicWidth() * drawable.getIntrinsicHeight() * 4);
}
@Override
public void recycle() {
// Do nothing.
}
}
| NonOwnedDrawableResource |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/field/samename/v1/Foo.java | {
"start": 247,
"end": 365
} | class ____ extends Bar {
@Inject
protected Abc abc;
public Abc getFooAbc() {
return abc;
}
}
| Foo |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/Mockito.java | {
"start": 26085,
"end": 26526
} | class ____ {
*
* @Mock private ArticleCalculator calculator;
* @Mock private ArticleDatabase database;
* @Mock private UserProvider userProvider;
*
* private ArticleManager manager;
*
* @org.junit.jupiter.api.Test
* void testSomethingInJunit5(@Mock ArticleDatabase database) {
* </code></pre>
*
* <b>Important!</b> This needs to be somewhere in the base | ArticleManagerTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_Issue_535.java | {
"start": 697,
"end": 1094
} | class ____ {
private String a;
private BigDecimal b;
// getter and setter
public String getA() {
return a;
}
public void setA(String a) {
this.a = a;
}
public BigDecimal getB() {
return b;
}
public void setB(BigDecimal b) {
this.b = b;
}
}
}
| TestPOJO |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/main/java/io/quarkus/rest/client/reactive/deployment/RestClientReactiveProcessor.java | {
"start": 45334,
"end": 47704
} | interface ____ the generated provider since it's going to be handled now
// the remaining entries will be handled later
List<GeneratedClassResult> providers = generatedProviders.remove(ifaceName);
for (GeneratedClassResult classResult : providers) {
mc.invokeInterfaceMethod(MAP_PUT, map, mc.loadClass(classResult.generatedClassName),
mc.load(classResult.priority));
}
}
addProviders(mc, ifaceName, map);
mc.returnVoid();
constructor.invokeVirtualMethod(mc.getMethodDescriptor(), constructor.getThis());
i++;
}
for (Map.Entry<String, List<GeneratedClassResult>> entry : generatedProviders.entrySet()) {
ResultHandle map = constructor.newInstance(MethodDescriptor.ofConstructor(HashMap.class));
for (GeneratedClassResult classResult : entry.getValue()) {
constructor.invokeInterfaceMethod(MAP_PUT, map, constructor.loadClass(classResult.generatedClassName),
constructor.load(classResult.priority));
addProviders(constructor, entry.getKey(), map);
}
}
}
private void addProviders(MethodCreator mc, String providerClass, ResultHandle map) {
mc.invokeVirtualMethod(
MethodDescriptor.ofMethod(AnnotationRegisteredProviders.class, "addProviders", void.class, String.class,
Map.class),
mc.getThis(), mc.load(providerClass), map);
}
private int getAnnotatedPriority(IndexView index, String className, int defaultPriority) {
ClassInfo providerClass = index.getClassByName(DotName.createSimple(className));
int priority = defaultPriority;
if (providerClass == null) {
log.warnv("Unindexed provider class {0}. The priority of the provider will be set to {1}. ", className,
defaultPriority);
} else {
AnnotationInstance priorityAnnoOnProvider = providerClass.declaredAnnotation(ResteasyReactiveDotNames.PRIORITY);
if (priorityAnnoOnProvider != null) {
priority = priorityAnnoOnProvider.value().asInt();
}
}
return priority;
}
// By default, Kotlin does not use Java | from |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/KafkaConsumer.java | {
"start": 31000,
"end": 35145
} | class ____ implements Runnable {
* private final AtomicBoolean closed = new AtomicBoolean(false);
* private final KafkaConsumer consumer;
*
* public KafkaConsumerRunner(KafkaConsumer consumer) {
* this.consumer = consumer;
* }
*
* {@literal}@Override
* public void run() {
* try {
* consumer.subscribe(Arrays.asList("topic"));
* while (!closed.get()) {
* ConsumerRecords records = consumer.poll(Duration.ofMillis(10000));
* // Handle new records
* }
* } catch (WakeupException e) {
* // Ignore exception if closing
* if (!closed.get()) throw e;
* } finally {
* consumer.close();
* }
* }
*
* // Shutdown hook which can be called from a separate thread
* public void shutdown() {
* closed.set(true);
* consumer.wakeup();
* }
* }
* </pre>
*
* Then in a separate thread, the consumer can be shutdown by setting the closed flag and waking up the consumer.
*
* <p>
* <pre>
* closed.set(true);
* consumer.wakeup();
* </pre>
*
* <p>
* Note that while it is possible to use thread interrupts instead of {@link #wakeup()} to abort a blocking operation
* (in which case, {@link InterruptException} will be raised), we discourage their use since they may cause a clean
* shutdown of the consumer to be aborted. Interrupts are mainly supported for those cases where using {@link #wakeup()}
* is impossible, e.g. when a consumer thread is managed by code that is unaware of the Kafka client.
*
* <p>
* We have intentionally avoided implementing a particular threading model for processing. This leaves several
* options for implementing multi-threaded processing of records.
*
* <h4>1. One Consumer Per Thread</h4>
*
* A simple option is to give each thread its own consumer instance. Here are the pros and cons of this approach:
* <ul>
* <li><b>PRO</b>: It is the easiest to implement
* <li><b>PRO</b>: It is often the fastest as no inter-thread co-ordination is needed
* <li><b>PRO</b>: It makes in-order processing on a per-partition basis very easy to implement (each thread just
* processes messages in the order it receives them).
* <li><b>CON</b>: More consumers means more TCP connections to the cluster (one per thread). In general Kafka handles
* connections very efficiently so this is generally a small cost.
* <li><b>CON</b>: Multiple consumers means more requests being sent to the server and slightly less batching of data
* which can cause some drop in I/O throughput.
* <li><b>CON</b>: The number of total threads across all processes will be limited by the total number of partitions.
* </ul>
*
* <h4>2. Decouple Consumption and Processing</h4>
*
* Another alternative is to have one or more consumer threads that do all data consumption and hands off
* {@link ConsumerRecords} instances to a blocking queue consumed by a pool of processor threads that actually handle
* the record processing.
*
* This option likewise has pros and cons:
* <ul>
* <li><b>PRO</b>: This option allows independently scaling the number of consumers and processors. This makes it
* possible to have a single consumer that feeds many processor threads, avoiding any limitation on partitions.
* <li><b>CON</b>: Guaranteeing order across the processors requires particular care as the threads will execute
* independently an earlier chunk of data may actually be processed after a later chunk of data just due to the luck of
* thread execution timing. For processing that has no ordering requirements this is not a problem.
* <li><b>CON</b>: Manually committing the position becomes harder as it requires that all threads co-ordinate to ensure
* that processing is complete for that partition.
* </ul>
*
* There are many possible variations on this approach. For example each processor thread can have its own queue, and
* the consumer threads can hash into these queues using the TopicPartition to ensure in-order consumption and simplify
* commit.
*/
public | KafkaConsumerRunner |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/DefaultLifecycles.java | {
"start": 1577,
"end": 6803
} | class ____ {
public static final String[] STANDARD_LIFECYCLES = {"clean", "default", "site"};
private final Logger logger = LoggerFactory.getLogger(getClass());
// @Configuration(source="org/apache/maven/lifecycle/lifecycles.xml")
private final Lookup lookup;
private final LifecycleRegistry registry;
private Map<String, Lifecycle> customLifecycles;
private boolean lifecyclesPrinted;
public DefaultLifecycles() {
this.lookup = null;
this.registry = null;
}
/**
* @deprecated Use {@link #DefaultLifecycles(LifecycleRegistry,Lookup)} instead
*/
@Deprecated
public DefaultLifecycles(Map<String, Lifecycle> lifecycles, org.codehaus.plexus.logging.Logger logger) {
this.customLifecycles = lifecycles;
this.lookup = null;
this.registry = null;
}
@Inject
public DefaultLifecycles(LifecycleRegistry registry, Lookup lookup) {
this.lookup = lookup;
this.registry = registry;
}
/**
* Get lifecycle based on phase
*
* @param phase
* @return
*/
public Lifecycle get(String phase) {
return getPhaseToLifecycleMap().get(phase);
}
/**
* We use this to map all phases to the lifecycle that contains it. This is used so that a user can specify the
* phase they want to execute, and we can easily determine what lifecycle we need to run.
*
* @return A map of lifecycles, indexed on id
*/
public Map<String, Lifecycle> getPhaseToLifecycleMap() {
if (logger.isDebugEnabled() && !lifecyclesPrinted) {
for (Lifecycle lifecycle : getLifeCycles()) {
logger.debug("Lifecycle {}", lifecycle);
}
lifecyclesPrinted = true;
}
// If people are going to make their own lifecycles then we need to tell people how to namespace them correctly
// so that they don't interfere with internally defined lifecycles.
Map<String, Lifecycle> phaseToLifecycleMap = new HashMap<>();
for (Lifecycle lifecycle : getLifeCycles()) {
for (String phase : lifecycle.getPhases()) {
// The first definition wins.
Lifecycle original = phaseToLifecycleMap.put(phase, lifecycle);
if (original != null && logger.isWarnEnabled()) {
logger.warn(
"Duplicated lifecycle phase {}. Defined in {} but also in {}",
phase,
original.getId(),
lifecycle.getId());
}
}
if (lifecycle.getDelegate() != null) {
for (org.apache.maven.api.Lifecycle.Alias alias :
lifecycle.getDelegate().aliases()) {
Lifecycle original = phaseToLifecycleMap.put(alias.v3Phase(), lifecycle);
if (original != null && logger.isWarnEnabled()) {
logger.warn(
"Duplicated lifecycle phase {}. Defined in {} but also in {}",
alias.v3Phase(),
original.getId(),
lifecycle.getId());
}
}
}
}
return phaseToLifecycleMap;
}
/**
* Returns an ordered list of lifecycles
*/
public List<Lifecycle> getLifeCycles() {
List<String> lifecycleIds = Arrays.asList(STANDARD_LIFECYCLES);
Comparator<String> comparator = (l, r) -> {
int lx = lifecycleIds.indexOf(l);
int rx = lifecycleIds.indexOf(r);
if (lx < 0 || rx < 0) {
return rx - lx;
} else {
return lx - rx;
}
};
Map<String, Lifecycle> lifecyclesMap = lookupLifecycles();
// ensure canonical order of standard lifecycles
return lifecyclesMap.values().stream()
.peek(l -> Objects.requireNonNull(l.getId(), "A lifecycle must have an id."))
.sorted(Comparator.comparing(Lifecycle::getId, comparator))
.collect(Collectors.toList());
}
private Map<String, Lifecycle> lookupLifecycles() {
// TODO: Remove the following code when maven-compat is gone
// This code is here to ensure maven-compat's EmptyLifecycleExecutor keeps on working.
if (lookup == null) {
return customLifecycles != null ? customLifecycles : new HashMap<>();
}
// Lifecycles cannot be cached as extensions might add custom lifecycles later in the execution.
try {
return registry != null
? registry.stream().collect(Collectors.toMap(lf -> lf.id(), lf -> new Lifecycle(registry, lf)))
: Map.of();
} catch (LookupException e) {
throw new IllegalStateException("Unable to lookup lifecycles from the plexus container", e);
}
}
public String getLifecyclePhaseList() {
return getLifeCycles().stream().flatMap(l -> l.getPhases().stream()).collect(Collectors.joining(", "));
}
}
| DefaultLifecycles |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/IndicativeSentencesOnSubClassScenarioOneTestCase.java | {
"start": 480,
"end": 585
} | class ____ extends IndicativeSentencesOnSubClassTestCase {
}
| IndicativeSentencesOnSubClassScenarioOneTestCase |
java | apache__camel | components/camel-jcr/src/generated/java/org/apache/camel/component/jcr/JcrConverterLoader.java | {
"start": 879,
"end": 4935
} | class ____ implements TypeConverterLoader, CamelContextAware {
private CamelContext camelContext;
public JcrConverterLoader() {
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {
registerConverters(registry);
}
private void registerConverters(TypeConverterRegistry registry) {
addTypeConverter(registry, java.io.InputStream.class, javax.jcr.Value.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toValue((javax.jcr.Value) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, java.lang.Boolean.class, javax.jcr.Value.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toBoolean((javax.jcr.Value) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, java.lang.String.class, javax.jcr.Value.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toString((javax.jcr.Value) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, java.util.Calendar.class, javax.jcr.Value.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toCalendar((javax.jcr.Value) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, javax.jcr.Value.class, java.io.InputStream.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toValue((java.io.InputStream) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, javax.jcr.Value.class, java.lang.Boolean.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toValue((java.lang.Boolean) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, javax.jcr.Value.class, java.lang.String.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toValue((java.lang.String) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
addTypeConverter(registry, javax.jcr.Value.class, java.util.Calendar.class, false,
(type, exchange, value) -> {
Object answer = org.apache.camel.component.jcr.JcrConverter.toValue((java.util.Calendar) value);
if (false && answer == null) {
answer = Void.class;
}
return answer;
});
}
private static void addTypeConverter(TypeConverterRegistry registry, Class<?> toType, Class<?> fromType, boolean allowNull, SimpleTypeConverter.ConversionMethod method) {
registry.addTypeConverter(toType, fromType, new SimpleTypeConverter(allowNull, method));
}
}
| JcrConverterLoader |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java | {
"start": 2149,
"end": 16345
} | class ____ extends Shell {
private int runCount = 0;
private Command(long interval) {
super(interval);
}
@Override
protected String[] getExecString() {
// There is no /bin/echo equivalent on Windows so just launch it as a
// shell built-in.
//
return WINDOWS ?
(new String[] {"cmd.exe", "/c", "echo", "hello"}) :
(new String[] {"echo", "hello"});
}
@Override
protected void parseExecResult(BufferedReader lines) throws IOException {
++runCount;
}
public int getRunCount() {
return runCount;
}
}
@BeforeEach
public void setup(TestInfo testInfo) {
rootTestDir.mkdirs();
assertTrue(rootTestDir.isDirectory(), "Not a directory " + rootTestDir);
methodDir = new File(rootTestDir, testInfo.getDisplayName());
}
@Test
public void testInterval() throws IOException {
testInterval(Long.MIN_VALUE / 60000); // test a negative interval
testInterval(0L); // test a zero interval
testInterval(10L); // interval equal to 10mins
testInterval(Time.now() / 60000 + 60); // test a very big interval
}
/**
* Assert that a string has a substring in it
* @param string string to search
* @param search what to search for it
*/
private void assertInString(String string, String search) {
assertNotNull(string, "Empty String");
if (!string.contains(search)) {
fail("Did not find \"" + search + "\" in " + string);
}
}
@Test
public void testShellCommandExecutorToString() throws Throwable {
Shell.ShellCommandExecutor sce=new Shell.ShellCommandExecutor(
new String[] { "ls", "..","arg 2"});
String command = sce.toString();
assertInString(command,"ls");
assertInString(command, " .. ");
assertInString(command, "\"arg 2\"");
}
@Test
public void testShellCommandTimeout() throws Throwable {
assumeFalse(WINDOWS);
String rootDir = rootTestDir.getAbsolutePath();
File shellFile = new File(rootDir, "timeout.sh");
String timeoutCommand = "sleep 4; echo \"hello\"";
Shell.ShellCommandExecutor shexc;
try (PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile))) {
writer.println(timeoutCommand);
writer.close();
}
FileUtil.setExecutable(shellFile, true);
shexc = new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
null, null, 100);
try {
shexc.execute();
} catch (Exception e) {
//When timing out exception is thrown.
}
shellFile.delete();
assertTrue(shexc.isTimedOut(), "Script did not timeout");
}
@Test
public void testEnvVarsWithInheritance() throws Exception {
assumeFalse(WINDOWS);
testEnvHelper(true);
}
@Test
public void testEnvVarsWithoutInheritance() throws Exception {
assumeFalse(WINDOWS);
testEnvHelper(false);
}
private void testEnvHelper(boolean inheritParentEnv) throws Exception {
Map<String, String> customEnv = new HashMap<>();
customEnv.put("AAA" + System.currentTimeMillis(), "AAA");
customEnv.put("BBB" + System.currentTimeMillis(), "BBB");
customEnv.put("CCC" + System.currentTimeMillis(), "CCC");
Shell.ShellCommandExecutor command = new ShellCommandExecutor(
new String[]{"env"}, null, customEnv, 0L, inheritParentEnv);
command.execute();
String[] varsArr = command.getOutput().split("\n");
Map<String, String> vars = new HashMap<>();
for (String var : varsArr) {
int eqIndex = var.indexOf('=');
vars.put(var.substring(0, eqIndex), var.substring(eqIndex + 1));
}
Map<String, String> expectedEnv = new HashMap<>();
expectedEnv.putAll(customEnv);
if (inheritParentEnv) {
expectedEnv.putAll(System.getenv());
}
assertEquals(expectedEnv, vars);
}
private static int countTimerThreads() {
ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
int count = 0;
ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20);
for (ThreadInfo info : infos) {
if (info == null) continue;
for (StackTraceElement elem : info.getStackTrace()) {
if (elem.getClassName().contains("Timer")) {
count++;
break;
}
}
}
return count;
}
@Test
public void testShellCommandTimerLeak() throws Exception {
String quickCommand[] = new String[] {"/bin/sleep", "100"};
int timersBefore = countTimerThreads();
System.err.println("before: " + timersBefore);
for (int i = 0; i < 10; i++) {
Shell.ShellCommandExecutor shexec = new Shell.ShellCommandExecutor(
quickCommand, null, null, 1);
try {
shexec.execute();
fail("Bad command should throw exception");
} catch (Exception e) {
// expected
}
}
Thread.sleep(1000);
int timersAfter = countTimerThreads();
System.err.println("after: " + timersAfter);
assertEquals(timersBefore, timersAfter);
}
@Test
public void testGetCheckProcessIsAliveCommand() throws Exception {
String anyPid = "9999";
String[] checkProcessAliveCommand = getCheckProcessIsAliveCommand(
anyPid);
String[] expectedCommand;
if (Shell.WINDOWS) {
expectedCommand =
new String[]{getWinUtilsPath(), "task", "isAlive", anyPid };
} else if (Shell.isSetsidAvailable) {
expectedCommand = new String[] { "bash", "-c", "kill -0 -- -'" +
anyPid + "'"};
} else {
expectedCommand = new String[] {"bash", "-c", "kill -0 '" + anyPid +
"'" };
}
assertArrayEquals(expectedCommand, checkProcessAliveCommand);
}
@Test
public void testGetSignalKillCommand() throws Exception {
String anyPid = "9999";
int anySignal = 9;
String[] checkProcessAliveCommand = getSignalKillCommand(anySignal,
anyPid);
String[] expectedCommand;
if (Shell.WINDOWS) {
expectedCommand =
new String[]{getWinUtilsPath(), "task", "kill", anyPid };
} else if (Shell.isSetsidAvailable) {
expectedCommand = new String[] { "bash", "-c", "kill -9 -- -'" + anyPid +
"'"};
} else {
expectedCommand = new String[]{ "bash", "-c", "kill -9 '" + anyPid +
"'"};
}
assertArrayEquals(expectedCommand, checkProcessAliveCommand);
}
private void testInterval(long interval) throws IOException {
Command command = new Command(interval);
command.run();
assertEquals(1, command.getRunCount());
command.run();
if (interval > 0) {
assertEquals(1, command.getRunCount());
} else {
assertEquals(2, command.getRunCount());
}
}
@Test
public void testHadoopHomeUnset() throws Throwable {
assertHomeResolveFailed(null, "unset");
}
@Test
public void testHadoopHomeEmpty() throws Throwable {
assertHomeResolveFailed("", E_HADOOP_PROPS_EMPTY);
}
@Test
public void testHadoopHomeEmptyDoubleQuotes() throws Throwable {
assertHomeResolveFailed("\"\"", E_HADOOP_PROPS_EMPTY);
}
@Test
public void testHadoopHomeEmptySingleQuote() throws Throwable {
assertHomeResolveFailed("\"", E_HADOOP_PROPS_EMPTY);
}
@Test
public void testHadoopHomeValid() throws Throwable {
File f = checkHadoopHomeInner(rootTestDir.getCanonicalPath());
assertEquals(rootTestDir, f);
}
@Test
public void testHadoopHomeValidQuoted() throws Throwable {
File f = checkHadoopHomeInner('"'+ rootTestDir.getCanonicalPath() + '"');
assertEquals(rootTestDir, f);
}
@Test
public void testHadoopHomeNoDir() throws Throwable {
assertHomeResolveFailed(methodDir.getCanonicalPath(), E_DOES_NOT_EXIST);
}
@Test
public void testHadoopHomeNotADir() throws Throwable {
File touched = touch(methodDir);
try {
assertHomeResolveFailed(touched.getCanonicalPath(), E_NOT_DIRECTORY);
} finally {
FileUtils.deleteQuietly(touched);
}
}
@Test
public void testHadoopHomeRelative() throws Throwable {
assertHomeResolveFailed("./target", E_IS_RELATIVE);
}
@Test
public void testBinDirMissing() throws Throwable {
FileNotFoundException ex = assertWinutilsResolveFailed(methodDir,
E_DOES_NOT_EXIST);
assertInString(ex.toString(), "Hadoop bin directory");
}
@Test
public void testHadoopBinNotADir() throws Throwable {
File bin = new File(methodDir, "bin");
touch(bin);
try {
assertWinutilsResolveFailed(methodDir, E_NOT_DIRECTORY);
} finally {
FileUtils.deleteQuietly(methodDir);
}
}
@Test
public void testBinWinUtilsFound() throws Throwable {
try {
File bin = new File(methodDir, "bin");
File winutils = new File(bin, WINUTILS_EXE);
touch(winutils);
assertEquals(winutils.getCanonicalPath(),
getQualifiedBinInner(methodDir, WINUTILS_EXE).getCanonicalPath());
} finally {
FileUtils.deleteQuietly(methodDir);
}
}
@Test
public void testBinWinUtilsNotAFile() throws Throwable {
try {
File bin = new File(methodDir, "bin");
File winutils = new File(bin, WINUTILS_EXE);
winutils.mkdirs();
assertWinutilsResolveFailed(methodDir, E_NOT_EXECUTABLE_FILE);
} finally {
FileUtils.deleteDirectory(methodDir);
}
}
/**
* This test takes advantage of the invariant winutils path is valid
* or access to it will raise an exception holds on Linux, and without
* any winutils binary even if HADOOP_HOME points to a real hadoop
* directory, the exception reporting can be validated
*/
@Test
public void testNoWinutilsOnUnix() throws Throwable {
assumeFalse(WINDOWS);
try {
getWinUtilsFile();
} catch (FileNotFoundException ex) {
assertExContains(ex, E_NOT_A_WINDOWS_SYSTEM);
}
try {
getWinUtilsPath();
} catch (RuntimeException ex) {
assertExContains(ex, E_NOT_A_WINDOWS_SYSTEM);
if ( ex.getCause() == null
|| !(ex.getCause() instanceof FileNotFoundException)) {
throw ex;
}
}
}
/**
* Touch a file; creating parent dirs on demand.
* @param path path of file
* @return the file created
* @throws IOException on any failure to write
*/
private File touch(File path) throws IOException {
path.getParentFile().mkdirs();
FileUtils.writeByteArrayToFile(path, new byte[]{});
return path;
}
/**
* Assert that an attept to resolve the hadoop home dir failed with
* an expected text in the exception string value.
* @param path input
* @param expectedText expected exception text
* @return the caught exception
* @throws FileNotFoundException any FileNotFoundException that was thrown
* but which did not contain the expected text
*/
private FileNotFoundException assertHomeResolveFailed(String path,
String expectedText) throws Exception {
try {
File f = checkHadoopHomeInner(path);
fail("Expected an exception with the text `" + expectedText + "`"
+ " -but got the path " + f);
// unreachable
return null;
} catch (FileNotFoundException ex) {
assertExContains(ex, expectedText);
return ex;
}
}
/**
* Assert that an attept to resolve the {@code bin/winutils.exe} failed with
* an expected text in the exception string value.
* @param hadoopHome hadoop home directory
* @param expectedText expected exception text
* @return the caught exception
* @throws Exception any Exception that was thrown
* but which did not contain the expected text
*/
private FileNotFoundException assertWinutilsResolveFailed(File hadoopHome,
String expectedText) throws Exception {
try {
File f = getQualifiedBinInner(hadoopHome, WINUTILS_EXE);
fail("Expected an exception with the text `" + expectedText + "`"
+ " -but got the path " + f);
// unreachable
return null;
} catch (FileNotFoundException ex) {
assertExContains(ex, expectedText);
return ex;
}
}
private void assertExContains(Exception ex, String expectedText)
throws Exception {
if (!ex.toString().contains(expectedText)) {
throw ex;
}
}
@Test
public void testBashQuote() {
assertEquals("'foobar'", Shell.bashQuote("foobar"));
assertEquals("'foo'\\''bar'", Shell.bashQuote("foo'bar"));
assertEquals("''\\''foo'\\''bar'\\'''", Shell.bashQuote("'foo'bar'"));
}
@Test
@Timeout(value = 120)
public void testDestroyAllShellProcesses() throws Throwable {
assumeFalse(WINDOWS);
StringBuilder sleepCommand = new StringBuilder();
sleepCommand.append("sleep 200");
String[] shellCmd = {"bash", "-c", sleepCommand.toString()};
final ShellCommandExecutor shexc1 = new ShellCommandExecutor(shellCmd);
final ShellCommandExecutor shexc2 = new ShellCommandExecutor(shellCmd);
SubjectInheritingThread shellThread1 = new SubjectInheritingThread() {
@Override
public void work() {
try {
shexc1.execute();
} catch(IOException ioe) {
//ignore IOException from thread interrupt
}
}
};
SubjectInheritingThread shellThread2 = new SubjectInheritingThread() {
@Override
public void work() {
try {
shexc2.execute();
} catch(IOException ioe) {
//ignore IOException from thread interrupt
}
}
};
shellThread1.start();
shellThread2.start();
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
return shexc1.getProcess() != null;
}
}, 10, 10000);
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
return shexc2.getProcess() != null;
}
}, 10, 10000);
Shell.destroyAllShellProcesses();
shexc1.getProcess().waitFor();
shexc2.getProcess().waitFor();
}
@Test
public void testIsJavaVersionAtLeast() {
assertTrue(Shell.isJavaVersionAtLeast(8));
}
@Test
public void testIsBashSupported() throws InterruptedIOException {
assumeTrue(Shell.checkIsBashSupported(), "Bash is not supported");
}
}
| Command |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/validation/beanvalidation/MethodValidationProxyTests.java | {
"start": 8436,
"end": 9081
} | class ____ implements MethodInterceptor {
private final MyValidBean myValidBean = new MyValidBean();
@Override
public @Nullable Object invoke(MethodInvocation invocation) {
Method method;
try {
method = ClassUtils.getMethod(MyValidBean.class, invocation.getMethod().getName(), (Class<?>[]) null);
}
catch (IllegalStateException ex) {
method = BridgeMethodResolver.findBridgedMethod(
ClassUtils.getMostSpecificMethod(invocation.getMethod(), MyValidBean.class));
}
return ReflectionUtils.invokeMethod(method, this.myValidBean, invocation.getArguments());
}
}
public | MyValidClientInterfaceMethodInterceptor |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/transaction/TestContextTransactionUtils.java | {
"start": 9065,
"end": 9738
} | class ____ test method to build the name of the transaction.
* @param testContext the {@code TestContext} upon which to base the name
* @param targetAttribute the {@code TransactionAttribute} to delegate to
* @return the delegating {@code TransactionAttribute}
*/
public static TransactionAttribute createDelegatingTransactionAttribute(
TestContext testContext, TransactionAttribute targetAttribute) {
return createDelegatingTransactionAttribute(testContext, targetAttribute, true);
}
/**
* Create a delegating {@link TransactionAttribute} for the supplied target
* {@link TransactionAttribute} and {@link TestContext}, using the names of
* the test | and |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/logging/LogRuntimeConfig.java | {
"start": 5061,
"end": 6893
} | interface ____ {
/**
* Default file name where logs should be stored.
*/
String DEFAULT_LOG_FILE_NAME = "quarkus.log";
/**
* If file logging should be enabled
*/
@WithDefault("false")
boolean enabled();
/**
* If file logging should be enabled
*
* @deprecated use {@code quarkus.log.file.enabled} instead
*/
// when dropping this property also adjust OpenTelemetryLogHandler
@Deprecated(since = "3.26", forRemoval = true)
Optional<Boolean> enable();
/**
* The log format
*/
@WithDefault("%d{yyyy-MM-dd HH:mm:ss,SSS} %h %N[%i] %-5p [%c{3.}] (%t) %s%e%n")
String format();
/**
* The level of logs to be written into the file.
*/
@WithDefault("ALL")
@WithConverter(LevelConverter.class)
Level level();
/**
* The name of the file in which logs will be written.
*/
@WithDefault(DEFAULT_LOG_FILE_NAME)
File path();
/**
* The name of the filter to link to the file handler.
*/
Optional<String> filter();
/**
* The character encoding used
*/
Optional<@WithConverter(CharsetConverter.class) Charset> encoding();
/**
* File async logging config
*/
AsyncConfig async();
/**
* File rotation config.
* The time interval is determined by the content of the <code>fileSuffix</code> property.
* The size interval is determined by the content of the <code>maxFileSize</code> property.
* If both are used, the rotating will be based on time, then on size.
*/
RotationConfig rotation();
| FileConfig |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionResponseTests.java | {
"start": 486,
"end": 1117
} | class ____ extends AbstractWireSerializingTestCase<PutFollowAction.Response> {
@Override
protected Writeable.Reader<PutFollowAction.Response> instanceReader() {
return PutFollowAction.Response::new;
}
@Override
protected PutFollowAction.Response createTestInstance() {
return new PutFollowAction.Response(randomBoolean(), randomBoolean(), randomBoolean());
}
@Override
protected PutFollowAction.Response mutateInstance(PutFollowAction.Response instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| PutFollowActionResponseTests |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/AbstractAssertBaseTest.java | {
"start": 806,
"end": 988
} | class ____ extends BaseTestTemplate<ConcreteAssert, Object> {
@Override
protected ConcreteAssert create_assertions() {
return new ConcreteAssert(6L);
}
}
| AbstractAssertBaseTest |
java | google__guava | android/guava/src/com/google/common/collect/Multimaps.java | {
"start": 17115,
"end": 18656
} | class ____ {@code factory.get()} does.
*
* <p>The multimap is serializable if {@code map}, {@code factory}, the sets generated by {@code
* factory}, and the multimap contents are all serializable.
*
* <p>The multimap is not threadsafe when any concurrent operations update the multimap, even if
* {@code map} and the instances generated by {@code factory} are. Concurrent read operations will
* work correctly. To allow concurrent update operations, wrap the multimap with a call to {@link
* #synchronizedSetMultimap}.
*
* <p>Call this method only when the simpler methods {@link HashMultimap#create()}, {@link
* LinkedHashMultimap#create()}, {@link TreeMultimap#create()}, and {@link
* TreeMultimap#create(Comparator, Comparator)} won't suffice.
*
* <p>Note: the multimap assumes complete ownership over of {@code map} and the sets returned by
* {@code factory}. Those objects should not be manually updated and they should not use soft,
* weak, or phantom references.
*
* @param map place to store the mapping from each key to its corresponding values
* @param factory supplier of new, empty sets that will each hold all values for a given key
* @throws IllegalArgumentException if {@code map} is not empty
*/
public static <K extends @Nullable Object, V extends @Nullable Object>
SetMultimap<K, V> newSetMultimap(
Map<K, Collection<V>> map, Supplier<? extends Set<V>> factory) {
return new CustomSetMultimap<>(map, factory);
}
private static final | than |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/testkit/ShortArrays.java | {
"start": 694,
"end": 1067
} | class ____ {
private static final short[] EMPTY = {};
public static short[] arrayOf(int... values) {
int size = values.length;
short[] array = new short[size];
for (int i = 0; i < size; i++) {
array[i] = (short) values[i];
}
return array;
}
public static short[] emptyArray() {
return EMPTY;
}
private ShortArrays() {}
}
| ShortArrays |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/utils/ExceptionUtil.java | {
"start": 838,
"end": 2049
} | class ____ {
private ExceptionUtil() {
}
/**
* Represents an empty exception, that is, no exception occurs, only a constant.
*/
public static final Exception NONE_EXCEPTION = new RuntimeException("");
public static String getAllExceptionMsg(Throwable e) {
Throwable cause = e;
StringBuilder strBuilder = new StringBuilder();
while (cause != null && !StringUtils.isEmpty(cause.getMessage())) {
strBuilder.append("caused: ").append(cause.getMessage()).append(';');
cause = cause.getCause();
}
return strBuilder.toString();
}
public static Throwable getCause(final Throwable t) {
final Throwable cause = t.getCause();
if (Objects.isNull(cause)) {
return t;
}
return cause;
}
public static String getStackTrace(final Throwable t) {
if (t == null) {
return "";
}
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final PrintStream ps = new PrintStream(out);
t.printStackTrace(ps);
ps.flush();
return out.toString();
}
}
| ExceptionUtil |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java | {
"start": 888,
"end": 1238
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final DeleteModelSnapshotAction INSTANCE = new DeleteModelSnapshotAction();
public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/delete";
private DeleteModelSnapshotAction() {
super(NAME);
}
public static | DeleteModelSnapshotAction |
java | spring-projects__spring-boot | module/spring-boot-data-mongodb/src/test/java/org/springframework/boot/data/mongodb/autoconfigure/domain/country/Country.java | {
"start": 814,
"end": 1127
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private String name;
protected Country() {
}
public Country(String name) {
this.name = name;
}
public String getName() {
return this.name;
}
@Override
public String toString() {
return getName();
}
}
| Country |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/ConfigurableBeanFactory.java | {
"start": 3541,
"end": 3652
} | class ____,
* to be resolved once the factory processes the bean definition.
* @param beanClassLoader the | names |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/FluxBufferTimeoutStressTest.java | {
"start": 3742,
"end": 6535
} | class ____ {
final FastLogger fastLogger = new FastLogger(getClass().getName());
final VirtualTimeScheduler virtualTimeScheduler = VirtualTimeScheduler.create();
final StressSubscriber<List<Long>> subscriber = new StressSubscriber<>();
final FluxBufferTimeout.BufferTimeoutWithBackpressureSubscriber<Long, List<Long>> bufferTimeoutSubscriber =
new FluxBufferTimeout.BufferTimeoutWithBackpressureSubscriber<>(subscriber, 2, 1, TimeUnit.SECONDS, virtualTimeScheduler.createWorker(), bufferSupplier(), fastLogger);
final StressSubscription<Long> subscription = new StressSubscription<>(bufferTimeoutSubscriber);
{
bufferTimeoutSubscriber.onSubscribe(subscription);
}
@Actor
public void next() {
bufferTimeoutSubscriber.onNext(0L);
bufferTimeoutSubscriber.onNext(1L);
bufferTimeoutSubscriber.onNext(2L);
bufferTimeoutSubscriber.onNext(3L);
bufferTimeoutSubscriber.onNext(4L);
bufferTimeoutSubscriber.onComplete();
}
@Actor
public void timeout() {
virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(1));
virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(1));
virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(1));
virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(1));
virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(1));
}
@Arbiter
public void arbiter(LL_Result result) {
result.r1 = subscriber.onNextCalls.get();
result.r2 = subscription.requestsCount.get();
if (subscriber.onCompleteCalls.get() != 1) {
fail(fastLogger, "unexpected completion: " + subscriber.onCompleteCalls.get());
}
if (subscriber.concurrentOnComplete.get()) {
fail(fastLogger, "subscriber concurrent onComplete");
}
if (subscriber.concurrentOnNext.get()) {
fail(fastLogger, "subscriber concurrent onNext");
}
if (!subscriber.discardedValues.isEmpty()) {
fail(fastLogger, "Unexpected discarded values " + subscriber.discardedValues);
}
if (!allValuesHandled(fastLogger, 5, emptyList(),
subscriber.receivedValues)) {
fail(fastLogger, "not all values delivered; result=" + result);
}
}
}
@JCStressTest
@Outcome(id = "5, 1, 2", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 1, 3", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 1, 4", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 1, 5", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 0, 2", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 0, 3", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 0, 4", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 0, 5", expect = Expect.ACCEPTABLE, desc = "")
@Outcome(id = "5, 0, 1", expect = Expect.ACCEPTABLE, desc = "")
@State
public static | FluxBufferTimeoutStressTestRaceDeliveryAndMoreTimeouts |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/defaultmethods/GenericComicCharactersInterfaceDefaultMethodsTests.java | {
"start": 1231,
"end": 1473
} | interface ____
* methods and Java generics in JUnit Jupiter test classes when used with the Spring
* TestContext Framework and the {@link SpringExtension}.
*
* @author Sam Brannen
* @since 5.0
*/
@SpringJUnitConfig(TestConfig.class)
| default |
java | elastic__elasticsearch | x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java | {
"start": 2401,
"end": 2996
} | class ____ extends Model {
public TestDenseModel(String inferenceEntityId, TestDenseInferenceServiceExtension.TestServiceSettings serviceSettings) {
super(
new ModelConfigurations(
inferenceEntityId,
TaskType.TEXT_EMBEDDING,
TestDenseInferenceServiceExtension.TestInferenceService.NAME,
serviceSettings
),
new ModelSecrets(new AbstractTestInferenceService.TestSecretSettings("api_key"))
);
}
}
public static | TestDenseModel |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/bug/JSONObectNullTest.java | {
"start": 202,
"end": 488
} | class ____ extends TestCase {
public void test_for_null() throws Exception {
Model model = JSON.parseObject("{\"value\":null}", Model.class);
}
public void test_for_null2() throws Exception {
JSON.parseObject("null");
}
public static | JSONObectNullTest |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/providers/serialisers/jsonp/JsonObjectHandler.java | {
"start": 515,
"end": 1696
} | class ____ implements MessageBodyReader<JsonObject>, MessageBodyWriter<JsonObject> {
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return JsonObject.class.isAssignableFrom(type);
}
public void writeTo(JsonObject o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
try (JsonWriter writer = JsonpUtil.writer(entityStream, mediaType)) {
writer.writeObject(o);
}
}
@Override
public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return JsonObject.class.isAssignableFrom(type);
}
@Override
public JsonObject readFrom(Class<JsonObject> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException, WebApplicationException {
return JsonpUtil.reader(entityStream, mediaType).readObject();
}
}
| JsonObjectHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 87605,
"end": 87773
} | interface ____<@ImmutableTypeParameter T> {}
""")
.addSourceLines(
"ChildGenericWithImmutableParam.java",
"""
| GenericWithImmutableParamIface |
java | quarkusio__quarkus | extensions/spring-data-jpa/deployment/src/test/java/io/quarkus/spring/data/deployment/multiple_pu/MultiplePersistenceUnitConfigTest.java | {
"start": 940,
"end": 4438
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(FirstEntity.class, SecondEntity.class,
FirstEntityRepository.class, SecondEntityRepository.class,
PanacheTestResource.class)
.addAsResource("application-multiple-persistence-units.properties", "application.properties"));
@Inject
private FirstEntityRepository repository1;
@Inject
private SecondEntityRepository repository2;
@BeforeEach
void beforeEach() {
repository1.deleteAll();
repository2.deleteAll();
}
@Test
public void panacheOperations() {
/**
* First entity operations
*/
RestAssured.when().get("/persistence-unit/first/name-1").then().body(Matchers.is("1"));
RestAssured.when().get("/persistence-unit/first/name-2").then().body(Matchers.is("2"));
/**
* second entity operations
*/
RestAssured.when().get("/persistence-unit/second/name-1").then().body(Matchers.is("1"));
RestAssured.when().get("/persistence-unit/second/name-2").then().body(Matchers.is("2"));
}
@Test
public void entityLifecycle() {
var detached = repository2.save(new SecondEntity());
assertThat(detached.id).isNotNull();
assertThat(inTx(repository2::count)).isEqualTo(1);
detached.name = "name";
repository2.save(detached);
assertThat(inTx(repository2::count)).isEqualTo(1);
inTx(() -> {
var lazyRef = repository2.getOne(detached.id);
assertThat(lazyRef.name).isEqualTo(detached.name);
return null;
});
repository2.deleteByName("otherThan" + detached.name);
assertThat(inTx(() -> repository2.findById(detached.id))).isPresent();
repository2.deleteByName(detached.name);
assertThat(inTx(() -> repository2.findById(detached.id))).isEmpty();
}
@Test
void pagedQueries() {
var newEntity = new SecondEntity();
newEntity.name = "name";
var detached = repository2.save(newEntity);
Pageable pageable = PageRequest.of(0, 10, Sort.Direction.DESC, "id");
var page = inTx(() -> repository2.findByName(detached.name, pageable));
assertThat(page.getContent()).extracting(e -> e.id).containsExactly(detached.id);
var pageIndexParam = inTx(() -> repository2.findByNameQueryIndexed(detached.name, pageable));
assertThat(pageIndexParam.getContent()).extracting(e -> e.id).containsExactly(detached.id);
var pageNamedParam = inTx(() -> repository2.findByNameQueryNamed(detached.name, pageable));
assertThat(pageNamedParam.getContent()).extracting(e -> e.id).containsExactly(detached.id);
}
@Test
void cascading() {
var newParent = new SecondEntity();
newParent.name = "parent";
var newChild = new SecondEntity();
newChild.name = "child";
newParent.child = newChild;
var detachedParent = repository2.save(newParent);
assertThat(inTx(repository2::count)).isEqualTo(2);
repository2.deleteByName(detachedParent.name);
assertThat(inTx(repository2::count)).isZero();
}
private <T> T inTx(Supplier<T> action) {
return QuarkusTransaction.requiringNew().call(action::get);
}
}
| MultiplePersistenceUnitConfigTest |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/scan/valid/ConfigurationPropertiesScanConfiguration.java | {
"start": 1173,
"end": 1366
} | class ____ {
@ConfigurationPropertiesScan
@EnableConfigurationProperties({ ConfigurationPropertiesScanConfiguration.FooProperties.class })
public static | ConfigurationPropertiesScanConfiguration |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/HandlerMethodReturnValueHandlerComposite.java | {
"start": 1632,
"end": 4469
} | class ____.
* @param logger the logger to use
* @since 5.1
*/
public void setLogger(Log logger) {
this.logger = logger;
}
/**
* Return the currently configured Logger.
* @since 5.1
*/
public Log getLogger() {
return logger;
}
/**
* Return a read-only list with the configured handlers.
*/
public List<HandlerMethodReturnValueHandler> getReturnValueHandlers() {
return Collections.unmodifiableList(this.returnValueHandlers);
}
/**
* Clear the list of configured handlers.
*/
public void clear() {
this.returnValueHandlers.clear();
}
/**
* Add the given {@link HandlerMethodReturnValueHandler}.
*/
public HandlerMethodReturnValueHandlerComposite addHandler(HandlerMethodReturnValueHandler returnValueHandler) {
this.returnValueHandlers.add(returnValueHandler);
return this;
}
/**
* Add the given {@link HandlerMethodReturnValueHandler HandlerMethodReturnValueHandlers}.
*/
public HandlerMethodReturnValueHandlerComposite addHandlers(
@Nullable List<? extends HandlerMethodReturnValueHandler> handlers) {
if (handlers != null) {
this.returnValueHandlers.addAll(handlers);
}
return this;
}
@Override
public boolean supportsReturnType(MethodParameter returnType) {
return getReturnValueHandler(returnType) != null;
}
@SuppressWarnings("ForLoopReplaceableByForEach")
private @Nullable HandlerMethodReturnValueHandler getReturnValueHandler(MethodParameter returnType) {
for (HandlerMethodReturnValueHandler handler : this.returnValueHandlers) {
if (handler.supportsReturnType(returnType)) {
return handler;
}
}
return null;
}
@Override
public void handleReturnValue(@Nullable Object returnValue, MethodParameter returnType, Message<?> message)
throws Exception {
HandlerMethodReturnValueHandler handler = getReturnValueHandler(returnType);
if (handler == null) {
throw new IllegalStateException("No handler for return value type: " + returnType.getParameterType());
}
if (logger.isTraceEnabled()) {
logger.trace("Processing return value with " + handler);
}
handler.handleReturnValue(returnValue, returnType, message);
}
@Override
public boolean isAsyncReturnValue(Object returnValue, MethodParameter returnType) {
HandlerMethodReturnValueHandler handler = getReturnValueHandler(returnType);
return (handler instanceof AsyncHandlerMethodReturnValueHandler asyncHandler &&
asyncHandler.isAsyncReturnValue(returnValue, returnType));
}
@Override
public @Nullable CompletableFuture<?> toCompletableFuture(Object returnValue, MethodParameter returnType) {
HandlerMethodReturnValueHandler handler = getReturnValueHandler(returnType);
if (handler instanceof AsyncHandlerMethodReturnValueHandler asyncHandler) {
return asyncHandler.toCompletableFuture(returnValue, returnType);
}
return null;
}
}
| name |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/test/java/org/apache/camel/test/main/junit5/annotation/InheritanceTest.java | {
"start": 1007,
"end": 1077
} | class ____ that the annotation inheritance works as expected.
*/
| ensuring |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepInfoTests.java | {
"start": 558,
"end": 1513
} | class ____ extends AbstractXContentTestCase<SegmentCountStep.Info> {
@Override
protected Info createTestInstance() {
return new Info(randomNonNegativeLong());
}
@Override
protected Info doParseInstance(XContentParser parser) throws IOException {
return Info.PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return false;
}
public final void testEqualsAndHashcode() {
for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copyInstance, this::mutateInstance);
}
}
protected final Info copyInstance(Info instance) throws IOException {
return new Info(instance.numberShardsLeftToMerge());
}
protected Info mutateInstance(Info instance) throws IOException {
return createTestInstance();
}
}
| SegmentCountStepInfoTests |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/TransactionLookupTableTests.java | {
"start": 642,
"end": 2809
} | class ____ extends ESTestCase {
static BigArrays mockBigArrays() {
return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
}
public void testBasic() {
try (TransactionsLookupTable transactions = new TransactionsLookupTable(10, mockBigArrays())) {
// setup 3 transactions
ItemSetBitSet set = new ItemSetBitSet();
set.set(0);
set.set(3);
set.set(200);
set.set(5);
set.set(65);
transactions.append(set);
set.clear();
set.set(2);
set.set(33);
set.set(44);
transactions.append(set);
assertEquals(2, transactions.size());
set.clear();
set.set(3);
set.set(5);
set.set(65);
set.set(99);
transactions.append(set);
assertEquals(3, transactions.size());
// lookup
set.clear();
set.set(3);
set.set(65);
assertTrue(transactions.isSubsetOf(0, set));
assertFalse(transactions.isSubsetOf(1, set));
assertTrue(transactions.isSubsetOf(2, set));
set.set(64);
assertFalse(transactions.isSubsetOf(0, set));
assertFalse(transactions.isSubsetOf(1, set));
assertFalse(transactions.isSubsetOf(2, set));
set.clear(64);
set.set(258);
assertFalse(transactions.isSubsetOf(0, set));
assertFalse(transactions.isSubsetOf(1, set));
assertFalse(transactions.isSubsetOf(2, set));
set.clear(258);
set.set(400);
assertFalse(transactions.isSubsetOf(0, set));
assertFalse(transactions.isSubsetOf(1, set));
assertFalse(transactions.isSubsetOf(2, set));
set.clear(400);
set.set(99);
assertFalse(transactions.isSubsetOf(0, set));
assertFalse(transactions.isSubsetOf(1, set));
assertTrue(transactions.isSubsetOf(2, set));
}
}
}
| TransactionLookupTableTests |
java | apache__camel | core/camel-main/src/generated/java/org/apache/camel/main/TelemetryDevConfigurationPropertiesConfigurer.java | {
"start": 713,
"end": 3512
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Enabled", boolean.class);
map.put("ExcludePatterns", java.lang.String.class);
map.put("TraceFormat", java.lang.String.class);
map.put("TraceProcessors", boolean.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.main.TelemetryDevConfigurationProperties target = (org.apache.camel.main.TelemetryDevConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "enabled": target.setEnabled(property(camelContext, boolean.class, value)); return true;
case "excludepatterns":
case "excludePatterns": target.setExcludePatterns(property(camelContext, java.lang.String.class, value)); return true;
case "traceformat":
case "traceFormat": target.setTraceFormat(property(camelContext, java.lang.String.class, value)); return true;
case "traceprocessors":
case "traceProcessors": target.setTraceProcessors(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "enabled": return boolean.class;
case "excludepatterns":
case "excludePatterns": return java.lang.String.class;
case "traceformat":
case "traceFormat": return java.lang.String.class;
case "traceprocessors":
case "traceProcessors": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.main.TelemetryDevConfigurationProperties target = (org.apache.camel.main.TelemetryDevConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "enabled": return target.isEnabled();
case "excludepatterns":
case "excludePatterns": return target.getExcludePatterns();
case "traceformat":
case "traceFormat": return target.getTraceFormat();
case "traceprocessors":
case "traceProcessors": return target.isTraceProcessors();
default: return null;
}
}
}
| TelemetryDevConfigurationPropertiesConfigurer |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/spi/AbstractCommonQueryContract.java | {
"start": 4868,
"end": 10503
} | class ____ implements CommonQueryContract {
private final SharedSessionContractImplementor session;
private final QueryOptionsImpl queryOptions;
public AbstractCommonQueryContract(SharedSessionContractImplementor session) {
this.session = session;
this.queryOptions = new QueryOptionsImpl();
}
protected AbstractCommonQueryContract(AbstractCommonQueryContract original) {
this.session = original.session;
this.queryOptions = original.queryOptions;
}
public final SharedSessionContractImplementor getSession() {
return session;
}
public final SessionFactoryImplementor getSessionFactory() {
return session.getFactory();
}
public final MappingMetamodelImplementor getMappingMetamodel() {
return session.getFactory().getMappingMetamodel();
}
public final TypeConfiguration getTypeConfiguration() {
return session.getFactory().getTypeConfiguration();
}
protected QueryInterpretationCache getInterpretationCache() {
return session.getFactory().getQueryEngine().getInterpretationCache();
}
protected final ExceptionConverter getExceptionConverter() {
return session.getExceptionConverter();
}
protected int getIntegerLiteral(JpaExpression<Number> expression, int defaultValue) {
if ( expression == null ) {
return defaultValue;
}
else if ( expression instanceof SqmLiteral<?> ) {
return ( (SqmLiteral<Number>) expression ).getLiteralValue().intValue();
}
else if ( expression instanceof Parameter<?> ) {
final Number parameterValue = getParameterValue( (Parameter<Number>) expression );
return parameterValue == null ? defaultValue : parameterValue.intValue();
}
throw new IllegalArgumentException( "Can't get integer literal value from: " + expression );
}
protected int getMaxRows(SqmSelectStatement<?> selectStatement, int size) {
final var fetchExpression = selectStatement.getFetch();
if ( fetchExpression != null ) {
final var fetchValue = fetchValue( fetchExpression );
if ( fetchValue != null ) {
// Note that we can never have ties because this is only used when we deduplicate results
return switch ( selectStatement.getFetchClauseType() ) {
case ROWS_ONLY, ROWS_WITH_TIES -> fetchValue.intValue();
case PERCENT_ONLY, PERCENT_WITH_TIES ->
(int) Math.ceil( (((double) size) * fetchValue.doubleValue()) / 100d );
};
}
}
return -1;
}
private Number fetchValue(JpaExpression<Number> expression) {
if ( expression instanceof SqmLiteral<?> ) {
return ((SqmLiteral<Number>) expression).getLiteralValue();
}
else if ( expression instanceof SqmParameter<?> ) {
return getParameterValue( (Parameter<Number>) expression );
}
else {
throw new IllegalArgumentException( "Can't get max rows value from: " + expression );
}
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Hints
public Map<String, Object> getHints() {
// According to the JPA spec, this should force a rollback, but that's insane :)
// If the TCK ever adds a check for this, we may need to change this behavior
checkOpenNoRollback();
final Map<String,Object> hints = new HashMap<>();
collectHints( hints );
return hints;
}
@SuppressWarnings("deprecation")
protected void collectHints(Map<String, Object> hints) {
final var queryOptions = getQueryOptions();
if ( queryOptions.getTimeout() != null ) {
hints.put( HINT_TIMEOUT, queryOptions.getTimeout() );
hints.put( HINT_SPEC_QUERY_TIMEOUT, queryOptions.getTimeout() * 1000 );
}
putIfNotNull( hints, HINT_COMMENT, getComment() );
putIfNotNull( hints, HINT_FLUSH_MODE, queryOptions.getFlushMode() );
putIfNotNull( hints, HINT_READONLY, queryOptions.isReadOnly() );
putIfNotNull( hints, HINT_FETCH_SIZE, queryOptions.getFetchSize() );
putIfNotNull( hints, HINT_CACHEABLE, queryOptions.isResultCachingEnabled() );
putIfNotNull( hints, HINT_CACHE_REGION, queryOptions.getResultCacheRegionName() );
putIfNotNull( hints, HINT_CACHE_MODE, queryOptions.getCacheMode() );
putIfNotNull( hints, HINT_QUERY_PLAN_CACHEABLE, queryOptions.getQueryPlanCachingEnabled() );
putIfNotNull( hints, HINT_SPEC_CACHE_RETRIEVE_MODE, queryOptions.getCacheRetrieveMode() );
putIfNotNull( hints, HINT_JAVAEE_CACHE_RETRIEVE_MODE, queryOptions.getCacheRetrieveMode() );
putIfNotNull( hints, HINT_SPEC_CACHE_STORE_MODE, queryOptions.getCacheStoreMode() );
putIfNotNull( hints, HINT_JAVAEE_CACHE_STORE_MODE, queryOptions.getCacheStoreMode() );
final var appliedGraph = queryOptions.getAppliedGraph();
if ( appliedGraph != null ) {
final var semantic = appliedGraph.getSemantic();
if ( semantic != null ) {
hints.put( semantic.getJakartaHintName(), appliedGraph.getGraph() );
hints.put( semantic.getJpaHintName(), appliedGraph.getGraph() );
}
}
final var lockOptions = getLockOptions();
if ( !lockOptions.isEmpty() ) {
final var lockMode = lockOptions.getLockMode();
if ( lockMode != null && lockMode != LockMode.NONE ) {
hints.put( HINT_NATIVE_LOCKMODE, lockMode );
}
if ( lockOptions.getFollowOnStrategy() != null ) {
hints.put( HINT_FOLLOW_ON_STRATEGY, lockOptions.getFollowOnStrategy() );
}
if ( lockOptions.getTimeout().milliseconds() != Timeouts.WAIT_FOREVER_MILLI ) {
hints.put( HINT_SPEC_LOCK_TIMEOUT, lockOptions.getTimeOut() );
hints.put( HINT_JAVAEE_LOCK_TIMEOUT, lockOptions.getTimeOut() );
}
}
}
protected void putIfNotNull(Map<String, Object> hints, String hintName, Enum<?> hintValue) {
// centralized spot to handle the decision whether to put enums directly into the hints map
// or whether to put the | AbstractCommonQueryContract |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/io/FileInputFormat.java | {
"start": 2803,
"end": 10580
} | class ____<OT> extends RichInputFormat<OT, FileInputSplit> {
// -------------------------------------- Constants -------------------------------------------
private static final Logger LOG = LoggerFactory.getLogger(FileInputFormat.class);
private static final long serialVersionUID = 1L;
/** The fraction that the last split may be larger than the others. */
private static final float MAX_SPLIT_SIZE_DISCREPANCY = 1.1f;
/** The timeout (in milliseconds) to wait for a filesystem stream to respond. */
private static long DEFAULT_OPENING_TIMEOUT;
/**
* A mapping of file extensions to decompression algorithms based on DEFLATE. Such compressions
* lead to unsplittable files.
*/
protected static final Map<String, InflaterInputStreamFactory<?>>
INFLATER_INPUT_STREAM_FACTORIES = new HashMap<String, InflaterInputStreamFactory<?>>();
/** The splitLength is set to -1L for reading the whole split. */
protected static final long READ_WHOLE_SPLIT_FLAG = -1L;
static {
initDefaultsFromConfiguration(GlobalConfiguration.loadConfiguration());
initDefaultInflaterInputStreamFactories();
}
/**
* Initialize defaults for input format. Needs to be a static method because it is configured
* for local cluster execution.
*
* @param configuration The configuration to load defaults from
*/
private static void initDefaultsFromConfiguration(Configuration configuration) {
final long to = configuration.get(FS_STREAM_OPENING_TIME_OUT).toMillis();
if (to < 0) {
LOG.error(
"Invalid timeout value for filesystem stream opening: "
+ to
+ ". Using default value of "
+ FS_STREAM_OPENING_TIME_OUT.defaultValue().toMillis());
DEFAULT_OPENING_TIMEOUT = FS_STREAM_OPENING_TIME_OUT.defaultValue().toMillis();
} else if (to == 0) {
DEFAULT_OPENING_TIMEOUT = 300000; // 5 minutes
} else {
DEFAULT_OPENING_TIMEOUT = to;
}
}
private static void initDefaultInflaterInputStreamFactories() {
InflaterInputStreamFactory<?>[] defaultFactories = {
DeflateInflaterInputStreamFactory.getInstance(),
GzipInflaterInputStreamFactory.getInstance(),
Bzip2InputStreamFactory.getInstance(),
XZInputStreamFactory.getInstance(),
ZStandardInputStreamFactory.getInstance()
};
for (InflaterInputStreamFactory<?> inputStreamFactory : defaultFactories) {
for (String fileExtension : inputStreamFactory.getCommonFileExtensions()) {
registerInflaterInputStreamFactory(fileExtension, inputStreamFactory);
}
}
}
/**
* Registers a decompression algorithm through a {@link
* org.apache.flink.api.common.io.compression.InflaterInputStreamFactory} with a file extension
* for transparent decompression.
*
* @param fileExtension of the compressed files
* @param factory to create an {@link java.util.zip.InflaterInputStream} that handles the
* decompression format
*/
public static void registerInflaterInputStreamFactory(
String fileExtension, InflaterInputStreamFactory<?> factory) {
synchronized (INFLATER_INPUT_STREAM_FACTORIES) {
if (INFLATER_INPUT_STREAM_FACTORIES.put(fileExtension, factory) != null) {
LOG.warn(
"Overwriting an existing decompression algorithm for \"{}\" files.",
fileExtension);
}
}
}
protected static InflaterInputStreamFactory<?> getInflaterInputStreamFactory(
String fileExtension) {
synchronized (INFLATER_INPUT_STREAM_FACTORIES) {
return INFLATER_INPUT_STREAM_FACTORIES.get(fileExtension);
}
}
@VisibleForTesting
public static Set<String> getSupportedCompressionFormats() {
return INFLATER_INPUT_STREAM_FACTORIES.keySet();
}
/**
* Returns the extension of a file name (!= a path).
*
* @return the extension of the file name or {@code null} if there is no extension.
*/
protected static String extractFileExtension(String fileName) {
checkNotNull(fileName);
int lastPeriodIndex = fileName.lastIndexOf('.');
if (lastPeriodIndex < 0) {
return null;
} else {
return fileName.substring(lastPeriodIndex + 1);
}
}
// --------------------------------------------------------------------------------------------
// Variables for internal operation.
// They are all transient, because we do not want them so be serialized
// --------------------------------------------------------------------------------------------
/** The input stream reading from the input file. */
protected transient FSDataInputStream stream;
/** The start of the split that this parallel instance must consume. */
protected transient long splitStart;
/** The length of the split that this parallel instance must consume. */
protected transient long splitLength;
/** The current split that this parallel instance must consume. */
protected transient FileInputSplit currentSplit;
// --------------------------------------------------------------------------------------------
// The configuration parameters. Configured on the instance and serialized to be shipped.
// --------------------------------------------------------------------------------------------
/** The list of paths to files and directories that contain the input. */
private Path[] filePaths;
/** The minimal split size, set by the configure() method. */
protected long minSplitSize = 0;
/** The desired number of splits, as set by the configure() method. */
protected int numSplits = -1;
/** Stream opening timeout. */
protected long openTimeout = DEFAULT_OPENING_TIMEOUT;
/**
* Some file input formats are not splittable on a block level (deflate) Therefore, the
* FileInputFormat can only read whole files.
*/
protected boolean unsplittable = false;
/**
* The flag to specify whether recursive traversal of the input directory structure is enabled.
*/
protected boolean enumerateNestedFiles = false;
/** Files filter for determining what files/directories should be included. */
private FilePathFilter filesFilter = new GlobFilePathFilter();
// --------------------------------------------------------------------------------------------
// Constructors
// --------------------------------------------------------------------------------------------
public FileInputFormat() {}
protected FileInputFormat(Path filePath) {
if (filePath != null) {
setFilePath(filePath);
}
}
// --------------------------------------------------------------------------------------------
// Getters/setters for the configurable parameters
// --------------------------------------------------------------------------------------------
/**
* Returns the paths of all files to be read by the FileInputFormat.
*
* @return The list of all paths to read.
*/
public Path[] getFilePaths() {
if (this.filePaths == null) {
return new Path[0];
}
return this.filePaths;
}
public void setFilePath(String filePath) {
if (filePath == null) {
throw new IllegalArgumentException("File path cannot be null.");
}
// TODO The job-submission web | FileInputFormat |
java | grpc__grpc-java | istio-interop-testing/src/main/java/io/grpc/testing/istio/EchoTestServer.java | {
"start": 13254,
"end": 19075
} | class ____ {
EchoResponse response;
Status status;
}
private ForwardEchoResponse buildEchoResponse(ForwardEchoRequest request)
throws InterruptedException {
ForwardEchoResponse.Builder forwardEchoResponseBuilder
= ForwardEchoResponse.newBuilder();
String rawUrl = request.getUrl();
List<String> urlParts = Splitter.on(':').limit(2).splitToList(rawUrl);
if (urlParts.size() < 2) {
throw new StatusRuntimeException(
Status.INVALID_ARGUMENT.withDescription("No protocol configured for url " + rawUrl));
}
ChannelCredentials creds;
String target = null;
if ("grpc".equals(urlParts.get(0))) {
// We don't really want to test this but the istio test infrastructure needs
// this to be supported. If we ever decide to add support for this properly,
// we would need to add support for TLS creds here.
creds = InsecureChannelCredentials.create();
target = urlParts.get(1).substring(2);
} else if ("xds".equals(urlParts.get(0))) {
creds = XdsChannelCredentials.create(InsecureChannelCredentials.create());
target = rawUrl;
} else {
logger.log(Level.INFO, "Protocol {0} not supported. Forwarding to {1}",
new String[]{urlParts.get(0), forwardingAddress});
return forwardingStub.withDeadline(Context.current().getDeadline()).forwardEcho(request);
}
ManagedChannelBuilder<?> channelBuilder = Grpc.newChannelBuilder(target, creds);
ManagedChannel channel = channelBuilder.build();
List<Header> requestHeaders = request.getHeadersList();
Metadata metadata = new Metadata();
for (Header header : requestHeaders) {
metadata.put(Metadata.Key.of(header.getKey(), Metadata.ASCII_STRING_MARSHALLER),
header.getValue());
}
int count = request.getCount() == 0 ? 1 : request.getCount();
// Calculate the amount of time to sleep after each call.
Duration durationPerQuery = Duration.ZERO;
if (request.getQps() > 0) {
durationPerQuery = Duration.ofNanos(
Duration.ofSeconds(1).toNanos() / request.getQps());
}
logger.info("qps=" + request.getQps());
logger.info("durationPerQuery=" + durationPerQuery);
EchoRequest echoRequest = EchoRequest.newBuilder()
.setMessage(request.getMessage())
.build();
Instant start = Instant.now();
logger.info("starting instant=" + start);
Duration expected = Duration.ZERO;
final CountDownLatch latch = new CountDownLatch(count);
EchoCall[] echoCalls = new EchoCall[count];
for (int i = 0; i < count; i++) {
Metadata currentMetadata = new Metadata();
currentMetadata.merge(metadata);
currentMetadata.put(
Metadata.Key.of(REQUEST_ID, Metadata.ASCII_STRING_MARSHALLER), "" + i);
EchoTestServiceGrpc.EchoTestServiceFutureStub stub
= EchoTestServiceGrpc.newFutureStub(channel).withInterceptors(
MetadataUtils.newAttachHeadersInterceptor(currentMetadata))
.withDeadlineAfter(request.getTimeoutMicros(), TimeUnit.MICROSECONDS);
echoCalls[i] = new EchoCall();
callEcho(stub, echoRequest, echoCalls[i], latch);
Instant current = Instant.now();
logger.info("after rpc instant=" + current);
Duration elapsed = Duration.between(start, current);
expected = expected.plus(durationPerQuery);
Duration timeLeft = expected.minus(elapsed);
logger.info("elapsed=" + elapsed + ", expected=" + expected + ", timeLeft=" + timeLeft);
if (!timeLeft.isNegative()) {
logger.info("sleeping for ms =" + timeLeft);
Thread.sleep(timeLeft.toMillis());
}
}
latch.await();
for (int i = 0; i < count; i++) {
if (Status.OK.equals(echoCalls[i].status)) {
forwardEchoResponseBuilder.addOutput(
buildForwardEchoStruct(i, echoCalls, request.getMessage()));
} else {
logger.log(Level.SEVERE, "RPC {0} failed {1}: {2}",
new Object[]{i, echoCalls[i].status.getCode(), echoCalls[i].status.getDescription()});
forwardEchoResponseBuilder.clear();
throw echoCalls[i].status.asRuntimeException();
}
}
return forwardEchoResponseBuilder.build();
}
private static String buildForwardEchoStruct(int i, EchoCall[] echoCalls,
String requestMessage) {
// The test infrastructure might expect the entire struct instead of
// just the message.
StringBuilder sb = new StringBuilder();
sb.append(String.format("[%d] grpcecho.Echo(%s)\n", i, requestMessage));
Iterable<String> iterable = Splitter.on('\n').split(echoCalls[i].response.getMessage());
for (String line : iterable) {
if (!line.isEmpty()) {
sb.append(String.format("[%d body] %s\n", i, line));
}
}
return sb.toString();
}
private void callEcho(EchoTestServiceFutureStub stub,
EchoRequest echoRequest, final EchoCall echoCall, CountDownLatch latch) {
ListenableFuture<EchoResponse> response = stub.echo(echoRequest);
Futures.addCallback(
response,
new FutureCallback<EchoResponse>() {
@Override
public void onSuccess(@Nullable EchoResponse result) {
echoCall.response = result;
echoCall.status = Status.OK;
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
echoCall.status = Status.fromThrowable(t);
latch.countDown();
}
},
MoreExecutors.directExecutor());
}
}
private static | EchoCall |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/FastDatePrinter.java | {
"start": 21478,
"end": 22539
} | class ____ implements NumberRule {
        // Calendar field constant (e.g. Calendar.MONTH) whose value is printed.
        private final int field;
        /**
         * Constructs an instance of {@link TwoDigitNumberField} with the specified field.
         *
         * @param field the field
         */
        TwoDigitNumberField(final int field) {
            this.field = field;
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void appendTo(final Appendable buffer, final Calendar calendar) throws IOException {
            appendTo(buffer, calendar.get(field));
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public void appendTo(final Appendable buffer, final int value) throws IOException {
            // Two-digit fast path for values below 100; larger values cannot fit
            // two digits, so delegate to the general full-digit helper.
            if (value < 100) {
                appendDigits(buffer, value);
            } else {
                appendFullDigits(buffer, value, 2);
            }
        }
        /**
         * {@inheritDoc}
         */
        @Override
        public int estimateLength() {
            // Estimate only; actual output exceeds 2 chars for values >= 100.
            return 2;
        }
    }
/**
* Inner | TwoDigitNumberField |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.