language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/tck/UsingTckTest.java | {
"start": 830,
"end": 1181
} | class ____ extends BaseTck<Long> {
@Override
public Publisher<Long> createPublisher(long elements) {
return
Flowable.using(Functions.justSupplier(1),
Functions.justFunction(Flowable.fromIterable(iterate(elements))),
Functions.emptyConsumer()
)
;
}
}
| UsingTckTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java | {
"start": 13312,
"end": 25301
} | class ____ {
private final Job job;
private final JobContext jContext;
private final TaskAttemptContext tContext;
private final AbstractS3ACommitter committer;
private final Configuration conf;
private Path writtenTextPath; // null if not written to
public JobData(Job job,
JobContext jContext,
TaskAttemptContext tContext,
AbstractS3ACommitter committer) {
this.job = job;
this.jContext = jContext;
this.tContext = tContext;
this.committer = committer;
conf = job.getConfiguration();
}
public Job getJob() {
return job;
}
public JobContext getJContext() {
return jContext;
}
public TaskAttemptContext getTContext() {
return tContext;
}
public AbstractS3ACommitter getCommitter() {
return committer;
}
public Configuration getConf() {
return conf;
}
public Path getWrittenTextPath() {
return writtenTextPath;
}
}
/**
* Create a new job. Sets the task attempt ID,
* and output dir; asks for a success marker.
* @return the new job
* @throws IOException failure
*/
public Job newJob() throws IOException {
return newJob(outDir, getConfiguration(), attempt0);
}
/**
* Create a new job. Sets the task attempt ID,
* and output dir; asks for a success marker.
* @param dir dest dir
* @param configuration config to get the job from
* @param taskAttemptId task attempt
* @return the new job
* @throws IOException failure
*/
private Job newJob(Path dir, Configuration configuration,
String taskAttemptId) throws IOException {
Job job = Job.getInstance(configuration);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptId);
conf.setBoolean(CREATE_SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, true);
FileOutputFormat.setOutputPath(job, dir);
return job;
}
/**
* Start a job with a committer; optionally write the test data.
* Always register the job to be aborted (quietly) in teardown.
* This is, from an "OO-purity perspective" the wrong kind of method to
* do: it's setting things up, mixing functionality, registering for teardown.
* Its aim is simple though: a common body of code for starting work
* in test cases.
* @param writeText should the text be written?
* @return the job data 4-tuple
* @throws IOException IO problems
* @throws InterruptedException interruption during write
*/
protected JobData startJob(boolean writeText)
throws IOException, InterruptedException {
return startJob(standardCommitterFactory, writeText);
}
/**
* Start a job with a committer; optionally write the test data.
* Always register the job to be aborted (quietly) in teardown.
* This is, from an "OO-purity perspective" the wrong kind of method to
* do: it's setting things up, mixing functionality, registering for teardown.
* Its aim is simple though: a common body of code for starting work
* in test cases.
* @param factory the committer factory to use
* @param writeText should the text be written?
* @return the job data 4-tuple
* @throws IOException IO problems
* @throws InterruptedException interruption during write
*/
protected JobData startJob(CommitterFactory factory, boolean writeText)
throws IOException, InterruptedException {
Job job = newJob();
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0);
conf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1);
JobContext jContext = new JobContextImpl(conf, taskAttempt0.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
taskAttempt0);
AbstractS3ACommitter committer = factory.createCommitter(tContext);
// setup
JobData jobData = new JobData(job, jContext, tContext, committer);
setup(jobData);
abortInTeardown(jobData);
if (writeText) {
// write output
jobData.writtenTextPath = writeTextOutput(tContext);
}
return jobData;
}
/**
* Set up the job and task.
* @param jobData job data
* @throws IOException problems
*/
protected void setup(JobData jobData) throws IOException {
AbstractS3ACommitter committer = jobData.committer;
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
describe("\nsetup job");
try (DurationInfo d = new DurationInfo(LOG,
"setup job %s", jContext.getJobID())) {
committer.setupJob(jContext);
}
setupCommitter(committer, tContext);
describe("setup complete\n");
}
private void setupCommitter(
final AbstractS3ACommitter committer,
final TaskAttemptContext tContext) throws IOException {
try (DurationInfo d = new DurationInfo(LOG,
"setup task %s", tContext.getTaskAttemptID())) {
committer.setupTask(tContext);
}
}
/**
* Abort a job quietly.
* @param jobData job info
*/
protected void abortJobQuietly(JobData jobData) {
abortJobQuietly(jobData.committer, jobData.jContext, jobData.tContext);
}
/**
* Abort a job quietly: first task, then job.
* @param committer committer
* @param jContext job context
* @param tContext task context
*/
protected void abortJobQuietly(AbstractS3ACommitter committer,
JobContext jContext,
TaskAttemptContext tContext) {
describe("\naborting task");
try {
committer.abortTask(tContext);
} catch (IOException e) {
log().warn("Exception aborting task:", e);
}
describe("\naborting job");
try {
committer.abortJob(jContext, JobStatus.State.KILLED);
} catch (IOException e) {
log().warn("Exception aborting job", e);
}
}
/**
* Commit up the task and then the job.
* @param committer committer
* @param jContext job context
* @param tContext task context
* @throws IOException problems
*/
protected void commit(AbstractS3ACommitter committer,
JobContext jContext,
TaskAttemptContext tContext) throws IOException {
try (DurationInfo d = new DurationInfo(LOG,
"committing work", jContext.getJobID())) {
describe("\ncommitting task");
committer.commitTask(tContext);
describe("\ncommitting job");
committer.commitJob(jContext);
describe("commit complete\n");
}
}
/**
* Execute work as part of a test, after creating the job.
* After the execution, {@link #abortJobQuietly(JobData)} is
* called for abort/cleanup.
* @param name name of work (for logging)
* @param action action to execute
* @throws Exception failure
*/
protected void executeWork(String name, ActionToTest action)
throws Exception {
executeWork(name, startJob(false), action);
}
/**
* Execute work as part of a test, against the created job.
* After the execution, {@link #abortJobQuietly(JobData)} is
* called for abort/cleanup.
* @param name name of work (for logging)
* @param jobData job info
* @param action action to execute
* @throws Exception failure
*/
public void executeWork(String name,
JobData jobData,
ActionToTest action) throws Exception {
try (DurationInfo d = new DurationInfo(LOG, "Executing %s", name)) {
action.exec(jobData.job,
jobData.jContext,
jobData.tContext,
jobData.committer);
} finally {
abortJobQuietly(jobData);
}
}
/**
* Verify that recovery doesn't work for these committers.
*/
@Test
@SuppressWarnings("deprecation")
public void testRecoveryAndCleanup() throws Exception {
describe("Test (Unsupported) task recovery.");
JobData jobData = startJob(true);
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
assertNotNull(committer.getWorkPath(),
"null workPath in committer " + committer);
assertNotNull(committer.getOutputPath(),
"null outputPath in committer " + committer);
// note the task attempt path.
Path job1TaskAttempt0Path = committer.getTaskAttemptPath(tContext);
// Commit the task. This will promote data and metadata to where
// job commits will pick it up on commit or abort.
commitTask(committer, tContext);
assertTaskAttemptPathDoesNotExist(committer, tContext);
Configuration conf2 = jobData.job.getConfiguration();
conf2.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0);
conf2.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 2);
JobContext jContext2 = new JobContextImpl(conf2, taskAttempt0.getJobID());
TaskAttemptContext tContext2 = new TaskAttemptContextImpl(conf2,
taskAttempt0);
AbstractS3ACommitter committer2 = createCommitter(tContext2);
committer2.setupJob(tContext2);
assertFalse(committer2.isRecoverySupported(),
"recoverySupported in " + committer2);
intercept(PathCommitException.class, "recover",
() -> committer2.recoverTask(tContext2));
// the new task attempt path is different from the first, because the
// job attempt counter is used in the path
final Path job2TaskAttempt0Path = committer2.getTaskAttemptPath(tContext2);
LOG.info("Job attempt 1 task attempt path {}; attempt 2 path {}",
job1TaskAttempt0Path, job2TaskAttempt0Path);
assertNotEquals(job1TaskAttempt0Path,
job2TaskAttempt0Path, "Task attempt paths must differ");
// at this point, task attempt 0 has failed to recover
// it should be abortable though. This will be a no-op as it already
// committed
describe("aborting task attempt 2; expect nothing to clean up");
committer2.abortTask(tContext2);
describe("Aborting job 2; expect pending commits to be aborted");
committer2.abortJob(jContext2, JobStatus.State.KILLED);
// now, state of system may still have pending data
assertNoMultipartUploadsPending(outDir);
}
protected void assertTaskAttemptPathDoesNotExist(
AbstractS3ACommitter committer, TaskAttemptContext context)
throws IOException {
Path attemptPath = committer.getTaskAttemptPath(context);
ContractTestUtils.assertPathDoesNotExist(
attemptPath.getFileSystem(context.getConfiguration()),
"task attempt dir",
attemptPath);
}
protected void assertJobAttemptPathDoesNotExist(
AbstractS3ACommitter committer, JobContext context)
throws IOException {
Path attemptPath = committer.getJobAttemptPath(context);
ContractTestUtils.assertPathDoesNotExist(
attemptPath.getFileSystem(context.getConfiguration()),
"job attempt dir",
attemptPath);
}
/**
* Verify the output of the directory.
* That includes the {@code part-m-00000-*}
* file existence and contents, as well as optionally, the success marker.
* @param dir directory to scan.
* @param expectSuccessMarker check the success marker?
* @param expectedJobId job ID, verified if non-empty and success data loaded
* @throws Exception failure.
*/
private void validateContent(Path dir,
boolean expectSuccessMarker,
String expectedJobId) throws Exception {
if (expectSuccessMarker) {
SuccessData successData = verifySuccessMarker(dir, expectedJobId);
}
Path expectedFile = getPart0000(dir);
log().debug("Validating content in {}", expectedFile);
StringBuilder expectedOutput = new StringBuilder();
expectedOutput.append(KEY_1).append('\t').append(VAL_1).append("\n");
expectedOutput.append(VAL_1).append("\n");
expectedOutput.append(VAL_2).append("\n");
expectedOutput.append(KEY_2).append("\n");
expectedOutput.append(KEY_1).append("\n");
expectedOutput.append(KEY_2).append('\t').append(VAL_2).append("\n");
String output = readFile(expectedFile);
assertEquals(expectedOutput.toString(), output,
"Content of " + expectedFile);
}
/**
* Verify storage | JobData |
java | junit-team__junit5 | junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/config/DefaultJupiterConfiguration.java | {
"start": 3711,
"end": 10394
} | class ____"));
private static final ConfigurationParameterConverter<CleanupMode> cleanupModeConverter = //
new EnumConfigurationParameterConverter<>(CleanupMode.class, "cleanup mode");
private static final InstantiatingConfigurationParameterConverter<TempDirFactory> tempDirFactoryConverter = //
new InstantiatingConfigurationParameterConverter<>(TempDirFactory.class, "temp dir factory");
private static final ConfigurationParameterConverter<ExtensionContextScope> extensionContextScopeConverter = //
new EnumConfigurationParameterConverter<>(ExtensionContextScope.class, "extension context scope");
private final ConfigurationParameters configurationParameters;
private final OutputDirectoryCreator outputDirectoryCreator;
public DefaultJupiterConfiguration(ConfigurationParameters configurationParameters,
OutputDirectoryCreator outputDirectoryCreator, DiscoveryIssueReporter issueReporter) {
this.configurationParameters = Preconditions.notNull(configurationParameters,
"ConfigurationParameters must not be null");
this.outputDirectoryCreator = outputDirectoryCreator;
validateConfigurationParameters(issueReporter);
}
private void validateConfigurationParameters(DiscoveryIssueReporter issueReporter) {
UNSUPPORTED_CONFIGURATION_PARAMETERS.forEach(key -> configurationParameters.get(key) //
.ifPresent(value -> {
var warning = DiscoveryIssue.create(Severity.WARNING, """
The '%s' configuration parameter is no longer supported. \
Please remove it from your configuration.""".formatted(key));
issueReporter.reportIssue(warning);
}));
if (isParallelExecutionEnabled()
&& configurationParameters.get(PARALLEL_CONFIG_EXECUTOR_SERVICE_PROPERTY_NAME).isEmpty()) {
var info = DiscoveryIssue.create(Severity.INFO,
"Parallel test execution is enabled but the default ForkJoinPool-based executor service will be used. "
+ "Please give the new implementation based on a regular thread pool a try by setting the '"
+ PARALLEL_CONFIG_EXECUTOR_SERVICE_PROPERTY_NAME + "' configuration parameter to '"
+ WORKER_THREAD_POOL + "' and report any issues to the JUnit team. "
+ "Alternatively, set the configuration parameter to '" + FORK_JOIN_POOL
+ "' to hide this message and keep using the original implementation.");
issueReporter.reportIssue(info);
}
}
@Override
public Predicate<Class<? extends Extension>> getFilterForAutoDetectedExtensions() {
String includePattern = getExtensionAutoDetectionIncludePattern();
String excludePattern = getExtensionAutoDetectionExcludePattern();
Predicate<String> predicate = ClassNamePatternFilterUtils.includeMatchingClassNames(includePattern) //
.and(ClassNamePatternFilterUtils.excludeMatchingClassNames(excludePattern));
return clazz -> predicate.test(clazz.getName());
}
private String getExtensionAutoDetectionIncludePattern() {
return configurationParameters.get(EXTENSIONS_AUTODETECTION_INCLUDE_PROPERTY_NAME) //
.orElse(ClassNamePatternFilterUtils.ALL_PATTERN);
}
private String getExtensionAutoDetectionExcludePattern() {
return configurationParameters.get(EXTENSIONS_AUTODETECTION_EXCLUDE_PROPERTY_NAME) //
.orElse(ClassNamePatternFilterUtils.BLANK);
}
@Override
public Optional<String> getRawConfigurationParameter(String key) {
return configurationParameters.get(key);
}
@Override
public <T> Optional<T> getRawConfigurationParameter(String key, Function<? super String, ? extends T> transformer) {
return configurationParameters.get(key, transformer);
}
@Override
public boolean isParallelExecutionEnabled() {
return configurationParameters.getBoolean(PARALLEL_EXECUTION_ENABLED_PROPERTY_NAME).orElse(false);
}
@Override
public boolean isClosingStoredAutoCloseablesEnabled() {
return configurationParameters.getBoolean(CLOSING_STORED_AUTO_CLOSEABLE_ENABLED_PROPERTY_NAME).orElse(true);
}
@Override
public boolean isExtensionAutoDetectionEnabled() {
return configurationParameters.getBoolean(EXTENSIONS_AUTODETECTION_ENABLED_PROPERTY_NAME).orElse(false);
}
@Override
public boolean isThreadDumpOnTimeoutEnabled() {
return configurationParameters.getBoolean(EXTENSIONS_TIMEOUT_THREAD_DUMP_ENABLED_PROPERTY_NAME).orElse(false);
}
@Override
public ExecutionMode getDefaultExecutionMode() {
return executionModeConverter.getOrDefault(configurationParameters, DEFAULT_EXECUTION_MODE_PROPERTY_NAME,
ExecutionMode.SAME_THREAD);
}
@Override
public ExecutionMode getDefaultClassesExecutionMode() {
return executionModeConverter.getOrDefault(configurationParameters,
DEFAULT_CLASSES_EXECUTION_MODE_PROPERTY_NAME, getDefaultExecutionMode());
}
@Override
public Lifecycle getDefaultTestInstanceLifecycle() {
return lifecycleConverter.getOrDefault(configurationParameters, DEFAULT_TEST_INSTANCE_LIFECYCLE_PROPERTY_NAME,
Lifecycle.PER_METHOD);
}
@Override
public Predicate<ExecutionCondition> getExecutionConditionFilter() {
return ClassNamePatternFilterUtils.excludeMatchingClasses(
configurationParameters.get(DEACTIVATE_CONDITIONS_PATTERN_PROPERTY_NAME).orElse(null));
}
@Override
public DisplayNameGenerator getDefaultDisplayNameGenerator() {
return displayNameGeneratorConverter.get(configurationParameters, DEFAULT_DISPLAY_NAME_GENERATOR_PROPERTY_NAME) //
.orElseGet(() -> DisplayNameGenerator.getDisplayNameGenerator(DisplayNameGenerator.Standard.class));
}
@Override
public Optional<MethodOrderer> getDefaultTestMethodOrderer() {
return methodOrdererConverter.get(configurationParameters, DEFAULT_TEST_METHOD_ORDER_PROPERTY_NAME);
}
@Override
public Optional<ClassOrderer> getDefaultTestClassOrderer() {
return classOrdererConverter.get(configurationParameters, DEFAULT_TEST_CLASS_ORDER_PROPERTY_NAME);
}
@Override
public CleanupMode getDefaultTempDirCleanupMode() {
return cleanupModeConverter.getOrDefault(configurationParameters, DEFAULT_CLEANUP_MODE_PROPERTY_NAME, ALWAYS);
}
@Override
public Supplier<TempDirFactory> getDefaultTempDirFactorySupplier() {
Supplier<Optional<TempDirFactory>> supplier = tempDirFactoryConverter.supply(configurationParameters,
DEFAULT_FACTORY_PROPERTY_NAME);
return () -> supplier.get().orElse(TempDirFactory.Standard.INSTANCE);
}
@SuppressWarnings("deprecation")
@Override
public ExtensionContextScope getDefaultTestInstantiationExtensionContextScope() {
return extensionContextScopeConverter.getOrDefault(configurationParameters,
DEFAULT_TEST_INSTANTIATION_EXTENSION_CONTEXT_SCOPE_PROPERTY_NAME, ExtensionContextScope.DEFAULT);
}
@Override
public OutputDirectoryCreator getOutputDirectoryCreator() {
return outputDirectoryCreator;
}
}
| orderer |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesDeploymentsEndpointBuilderFactory.java | {
"start": 33807,
"end": 37639
} | interface ____ extends EndpointProducerBuilder {
default KubernetesDeploymentsEndpointProducerBuilder basic() {
return (KubernetesDeploymentsEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedKubernetesDeploymentsEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedKubernetesDeploymentsEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Connection timeout in milliseconds to use when making requests to the
* Kubernetes API server.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: advanced
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default AdvancedKubernetesDeploymentsEndpointProducerBuilder connectionTimeout(Integer connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
/**
* Connection timeout in milliseconds to use when making requests to the
* Kubernetes API server.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: advanced
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default AdvancedKubernetesDeploymentsEndpointProducerBuilder connectionTimeout(String connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
}
/**
* Builder for endpoint for the Kubernetes Deployments component.
*/
public | AdvancedKubernetesDeploymentsEndpointProducerBuilder |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-netty4/src/main/java/org/apache/dubbo/remoting/transport/netty4/NettyEventLoopFactory.java | {
"start": 1912,
"end": 3390
} | class ____ {
/**
* netty client bootstrap
*/
public static final GlobalResourceInitializer<EventLoopGroup> NIO_EVENT_LOOP_GROUP =
new GlobalResourceInitializer<>(
() -> eventLoopGroup(Constants.DEFAULT_IO_THREADS, "NettyClientWorker"),
eventLoopGroup -> eventLoopGroup.shutdownGracefully());
public static EventLoopGroup eventLoopGroup(int threads, String threadFactoryName) {
ThreadFactory threadFactory = new DefaultThreadFactory(threadFactoryName, true);
if (shouldEpoll()) {
return new EpollEventLoopGroup(threads, threadFactory);
} else {
return new NioEventLoopGroup(threads, threadFactory);
}
}
public static Class<? extends SocketChannel> socketChannelClass() {
return shouldEpoll() ? EpollSocketChannel.class : NioSocketChannel.class;
}
public static Class<? extends ServerSocketChannel> serverSocketChannelClass() {
return shouldEpoll() ? EpollServerSocketChannel.class : NioServerSocketChannel.class;
}
private static boolean shouldEpoll() {
if (Boolean.parseBoolean(SystemPropertyConfigUtils.getSystemProperty(NETTY_EPOLL_ENABLE_KEY, "false"))) {
String osName = SystemPropertyConfigUtils.getSystemProperty(SYSTEM_OS_NAME);
return osName.toLowerCase().contains(OS_LINUX_PREFIX) && Epoll.isAvailable();
}
return false;
}
}
| NettyEventLoopFactory |
java | google__error-prone | test_helpers/src/test/java/com/google/errorprone/CompilationTestHelperTest.java | {
"start": 16645,
"end": 17075
} | class ____ {}
""");
AssertionError expected =
assertThrows(AssertionError.class, () -> compilationTestHelper.doTest());
assertThat(expected)
.hasMessageThat()
.contains("An unhandled exception was thrown by the Error Prone static analysis plugin");
}
/** A BugPattern that always throws. */
@BugPattern(summary = "A really broken checker.", severity = ERROR)
public static | Test |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/userguide/AddressType.java | {
"start": 230,
"end": 309
} | enum ____ {
HOME,
OFFICE
}
//end::hql-examples-domain-model-example[]
| AddressType |
java | apache__camel | components/camel-tahu/src/test/java/org/apache/camel/component/tahu/TahuEdgeProducerIT.java | {
"start": 1463,
"end": 1519
} | class ____ extends TahuTestSupport {
| TahuEdgeProducerIT |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/completion/AzureAiStudioChatCompletionTaskSettings.java | {
"start": 1830,
"end": 8342
} | class ____ implements TaskSettings {
public static final String NAME = "azure_ai_studio_chat_completion_task_settings";
public static final Integer DEFAULT_MAX_NEW_TOKENS = 64;
public static AzureAiStudioChatCompletionTaskSettings fromMap(Map<String, Object> map) {
ValidationException validationException = new ValidationException();
var temperature = extractOptionalDoubleInRange(
map,
TEMPERATURE_FIELD,
AzureAiStudioConstants.MIN_TEMPERATURE_TOP_P,
AzureAiStudioConstants.MAX_TEMPERATURE_TOP_P,
ModelConfigurations.TASK_SETTINGS,
validationException
);
var topP = extractOptionalDoubleInRange(
map,
TOP_P_FIELD,
AzureAiStudioConstants.MIN_TEMPERATURE_TOP_P,
AzureAiStudioConstants.MAX_TEMPERATURE_TOP_P,
ModelConfigurations.TASK_SETTINGS,
validationException
);
var doSample = extractOptionalBoolean(map, DO_SAMPLE_FIELD, validationException);
var maxNewTokens = extractOptionalPositiveInteger(
map,
MAX_NEW_TOKENS_FIELD,
ModelConfigurations.TASK_SETTINGS,
validationException
);
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
return new AzureAiStudioChatCompletionTaskSettings(temperature, topP, doSample, maxNewTokens);
}
/**
* Creates a new {@link AzureOpenAiEmbeddingsTaskSettings} object by overriding the values in originalSettings with the ones
* passed in via requestSettings if the fields are not null.
* @param originalSettings the original {@link AzureOpenAiEmbeddingsTaskSettings} from the inference entity configuration from storage
* @param requestSettings the {@link AzureOpenAiEmbeddingsTaskSettings} from the request
* @return a new {@link AzureOpenAiEmbeddingsTaskSettings}
*/
public static AzureAiStudioChatCompletionTaskSettings of(
AzureAiStudioChatCompletionTaskSettings originalSettings,
AzureAiStudioChatCompletionRequestTaskSettings requestSettings
) {
var temperature = requestSettings.temperature() == null ? originalSettings.temperature() : requestSettings.temperature();
var topP = requestSettings.topP() == null ? originalSettings.topP() : requestSettings.topP();
var doSample = requestSettings.doSample() == null ? originalSettings.doSample() : requestSettings.doSample();
var maxNewTokens = requestSettings.maxNewTokens() == null ? originalSettings.maxNewTokens() : requestSettings.maxNewTokens();
return new AzureAiStudioChatCompletionTaskSettings(temperature, topP, doSample, maxNewTokens);
}
public AzureAiStudioChatCompletionTaskSettings(
@Nullable Double temperature,
@Nullable Double topP,
@Nullable Boolean doSample,
@Nullable Integer maxNewTokens
) {
this.temperature = temperature;
this.topP = topP;
this.doSample = doSample;
this.maxNewTokens = maxNewTokens;
}
public AzureAiStudioChatCompletionTaskSettings(StreamInput in) throws IOException {
this.temperature = in.readOptionalDouble();
this.topP = in.readOptionalDouble();
this.doSample = in.readOptionalBoolean();
this.maxNewTokens = in.readOptionalInt();
}
private final Double temperature;
private final Double topP;
private final Boolean doSample;
private final Integer maxNewTokens;
public Double temperature() {
return temperature;
}
public Double topP() {
return topP;
}
public Boolean doSample() {
return doSample;
}
public Integer maxNewTokens() {
return maxNewTokens;
}
public boolean areAnyParametersAvailable() {
return temperature != null && topP != null && doSample != null && maxNewTokens != null;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.V_8_14_0;
}
@Override
public boolean isEmpty() {
return temperature == null && topP == null && doSample == null && maxNewTokens == null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalDouble(temperature);
out.writeOptionalDouble(topP);
out.writeOptionalBoolean(doSample);
out.writeOptionalInt(maxNewTokens);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (temperature != null) {
builder.field(TEMPERATURE_FIELD, temperature);
}
if (topP != null) {
builder.field(TOP_P_FIELD, topP);
}
if (doSample != null) {
builder.field(DO_SAMPLE_FIELD, doSample);
}
if (maxNewTokens != null) {
builder.field(MAX_NEW_TOKENS_FIELD, maxNewTokens);
}
builder.endObject();
return builder;
}
@Override
public String toString() {
return "AzureAiStudioChatCompletionTaskSettings{"
+ "temperature="
+ temperature
+ ", topP="
+ topP
+ ", doSample="
+ doSample
+ ", maxNewTokens="
+ maxNewTokens
+ '}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AzureAiStudioChatCompletionTaskSettings that = (AzureAiStudioChatCompletionTaskSettings) o;
return Objects.equals(temperature, that.temperature)
&& Objects.equals(topP, that.topP)
&& Objects.equals(doSample, that.doSample)
&& Objects.equals(maxNewTokens, that.maxNewTokens);
}
@Override
public int hashCode() {
return Objects.hash(temperature, topP, doSample, maxNewTokens);
}
@Override
public TaskSettings updatedTaskSettings(Map<String, Object> newSettings) {
AzureAiStudioChatCompletionRequestTaskSettings requestSettings = AzureAiStudioChatCompletionRequestTaskSettings.fromMap(
new HashMap<>(newSettings)
);
return of(this, requestSettings);
}
}
| AzureAiStudioChatCompletionTaskSettings |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/VerifiableConsumer.java | {
"start": 13123,
"end": 13891
} | class ____ extends ConsumerEvent {
private final long count;
private final List<RecordSetSummary> partitionSummaries;
public RecordsConsumed(long count, List<RecordSetSummary> partitionSummaries) {
this.count = count;
this.partitionSummaries = partitionSummaries;
}
@Override
public String name() {
return "records_consumed";
}
@JsonProperty
public long count() {
return count;
}
@JsonProperty
public List<RecordSetSummary> partitions() {
return partitionSummaries;
}
}
@JsonPropertyOrder({ "timestamp", "name", "key", "value", "topic", "partition", "offset" })
public static | RecordsConsumed |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/MapFunctionExpressionsTest.java | {
"start": 1232,
"end": 4297
} | class ____ {
@BeforeEach
public void prepareTestData(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( s -> {
AddressType homeType = new AddressType( 1, "home" );
s.persist( homeType );
Address address = new Address( 1, "Main St.", "Somewhere, USA" );
s.persist( address );
Contact contact = new Contact( 1, "John" );
contact.addresses.put( homeType, address );
s.persist( contact );
} );
}
@AfterEach
public void cleanUpTestData(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testMapKeyExpressionInWhere(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (s) -> {
// NOTE : JPA requires that an alias be used in the key() expression.
// Hibernate allows path or alias.
// JPA form
var results = s.createQuery( "select c from Contact c join c.addresses a where key(a) is not null" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( Contact.class );
// Hibernate additional form
results = s.createQuery( "select c from Contact c where key(c.addresses) is not null" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( Contact.class );
} );
}
@Test
public void testMapKeyExpressionInSelect(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (s) -> {
// NOTE : JPA requires that an alias be used in the key() expression.
// Hibernate allows path or alias.
// JPA form
var results = s.createQuery( "select key(a) from Contact c join c.addresses a" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( AddressType.class );
// Hibernate additional form
results = s.createQuery( "select key(c.addresses) from Contact c" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( AddressType.class );
} );
}
@Test
public void testMapValueExpressionInSelect(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (s) -> {
var results = s.createQuery( "select value(a) from Contact c join c.addresses a" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( Address.class );
results = s.createQuery( "select value(c.addresses) from Contact c" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( Address.class );
} );
}
@Test
public void testMapEntryExpressionInSelect(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (s) -> {
var results = s.createQuery( "select entry(a) from Contact c join c.addresses a" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( Map.Entry.class );
results = s.createQuery( "select entry(c.addresses) from Contact c" ).list();
assertEquals( 1, results.size() );
assertThat( results.get(0) ).isInstanceOf( Map.Entry.class );
} );
}
@Entity(name = "AddressType")
@Table(name = "address_type")
public static | MapFunctionExpressionsTest |
java | quarkusio__quarkus | extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenAnnotationTest.java | {
"start": 8704,
"end": 8832
} | class ____ extends AccessTokenRequestReactiveFilter {
}
@Path("/frontend")
public static | CustomAccessTokenRequestFilter |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/stereotypes/InconsistentPriorityStereotypesOverriddenOnBeanTest.java | {
"start": 1742,
"end": 1835
} | interface ____ {
}
@Dependent
@Stereotype1
@Priority(789)
static | Stereotype3 |
java | apache__camel | components/camel-telemetry-dev/src/generated/java/org/apache/camel/telemetrydev/TelemetryDevTracerConfigurer.java | {
"start": 712,
"end": 3960
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.telemetrydev.TelemetryDevTracer target = (org.apache.camel.telemetrydev.TelemetryDevTracer) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "camelcontext":
case "camelContext": target.setCamelContext(property(camelContext, org.apache.camel.CamelContext.class, value)); return true;
case "excludepatterns":
case "excludePatterns": target.setExcludePatterns(property(camelContext, java.lang.String.class, value)); return true;
case "spanlifecyclemanager":
case "spanLifecycleManager": target.setSpanLifecycleManager(property(camelContext, org.apache.camel.telemetry.SpanLifecycleManager.class, value)); return true;
case "traceformat":
case "traceFormat": target.setTraceFormat(property(camelContext, java.lang.String.class, value)); return true;
case "traceheadersinclusion":
case "traceHeadersInclusion": target.setTraceHeadersInclusion(property(camelContext, boolean.class, value)); return true;
case "traceprocessors":
case "traceProcessors": target.setTraceProcessors(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "camelcontext":
case "camelContext": return org.apache.camel.CamelContext.class;
case "excludepatterns":
case "excludePatterns": return java.lang.String.class;
case "spanlifecyclemanager":
case "spanLifecycleManager": return org.apache.camel.telemetry.SpanLifecycleManager.class;
case "traceformat":
case "traceFormat": return java.lang.String.class;
case "traceheadersinclusion":
case "traceHeadersInclusion": return boolean.class;
case "traceprocessors":
case "traceProcessors": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.telemetrydev.TelemetryDevTracer target = (org.apache.camel.telemetrydev.TelemetryDevTracer) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "camelcontext":
case "camelContext": return target.getCamelContext();
case "excludepatterns":
case "excludePatterns": return target.getExcludePatterns();
case "spanlifecyclemanager":
case "spanLifecycleManager": return target.getSpanLifecycleManager();
case "traceformat":
case "traceFormat": return target.getTraceFormat();
case "traceheadersinclusion":
case "traceHeadersInclusion": return target.isTraceHeadersInclusion();
case "traceprocessors":
case "traceProcessors": return target.isTraceProcessors();
default: return null;
}
}
}
| TelemetryDevTracerConfigurer |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/executor/loader/javassist/JavassistSerialStateHolder.java | {
"start": 972,
"end": 1890
} | class ____ extends AbstractSerialStateHolder {
private static final long serialVersionUID = 8940388717901644661L;
public JavassistSerialStateHolder() {
}
public JavassistSerialStateHolder(final Object userBean,
final Map<String, ResultLoaderMap.LoadPair> unloadedProperties, final ObjectFactory objectFactory,
List<Class<?>> constructorArgTypes, List<Object> constructorArgs) {
super(userBean, unloadedProperties, objectFactory, constructorArgTypes, constructorArgs);
}
@Override
protected Object createDeserializationProxy(Object target, Map<String, ResultLoaderMap.LoadPair> unloadedProperties,
ObjectFactory objectFactory, List<Class<?>> constructorArgTypes, List<Object> constructorArgs) {
return new JavassistProxyFactory().createDeserializationProxy(target, unloadedProperties, objectFactory,
constructorArgTypes, constructorArgs);
}
}
| JavassistSerialStateHolder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/instantiation/DynamicInstantiationWithJoinAndGroupByAndParameterTest.java | {
"start": 3163,
"end": 4062
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String partnerNumber;
private String title;
@ManyToOne(fetch = FetchType.LAZY, cascade = { CascadeType.MERGE })
@JoinColumn(name = "fk_user_id")
private UserEntity user;
public Action() {
}
public Action(String partnerNumber, String title, UserEntity user) {
this.partnerNumber = partnerNumber;
this.title = title;
this.user = user;
}
public Long getId() {
return id;
}
public String getPartnerNumber() {
return partnerNumber;
}
public void setPartnerNumber(String partnerNumber) {
this.partnerNumber = partnerNumber;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public UserEntity getUser() {
return user;
}
public void setUser(UserEntity user) {
this.user = user;
}
}
public static | Action |
java | grpc__grpc-java | examples/example-jwt-auth/src/main/java/io/grpc/examples/jwtauth/AuthServer.java | {
"start": 2805,
"end": 3324
} | class ____ extends GreeterGrpc.GreeterImplBase {
@Override
public void sayHello(HelloRequest req, StreamObserver<HelloReply> responseObserver) {
// get client id added to context by interceptor
String clientId = Constant.CLIENT_ID_CONTEXT_KEY.get();
logger.info("Processing request from " + clientId);
HelloReply reply = HelloReply.newBuilder().setMessage("Hello, " + req.getName()).build();
responseObserver.onNext(reply);
responseObserver.onCompleted();
}
}
}
| GreeterImpl |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java | {
"start": 1119,
"end": 6082
} | class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("quart", ElementType.BYTES_REF) );
private final DriverContext driverContext;
private final QuantileStates.SingleState state;
private final List<Integer> channels;
public MedianAbsoluteDeviationDoubleAggregatorFunction(DriverContext driverContext,
List<Integer> channels, QuantileStates.SingleState state) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
}
public static MedianAbsoluteDeviationDoubleAggregatorFunction create(DriverContext driverContext,
List<Integer> channels) {
return new MedianAbsoluteDeviationDoubleAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationDoubleAggregator.initSingle(driverContext));
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
DoubleBlock vBlock = page.getBlock(channels.get(0));
DoubleVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, mask);
return;
}
addRawVector(vVector, mask);
}
private void addRawInputNotMasked(Page page) {
DoubleBlock vBlock = page.getBlock(channels.get(0));
DoubleVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock);
return;
}
addRawVector(vVector);
}
private void addRawVector(DoubleVector vVector) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
double vValue = vVector.getDouble(valuesPosition);
MedianAbsoluteDeviationDoubleAggregator.combine(state, vValue);
}
}
private void addRawVector(DoubleVector vVector, BooleanVector mask) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
double vValue = vVector.getDouble(valuesPosition);
MedianAbsoluteDeviationDoubleAggregator.combine(state, vValue);
}
}
private void addRawBlock(DoubleBlock vBlock) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
double vValue = vBlock.getDouble(vOffset);
MedianAbsoluteDeviationDoubleAggregator.combine(state, vValue);
}
}
}
private void addRawBlock(DoubleBlock vBlock, BooleanVector mask) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
double vValue = vBlock.getDouble(vOffset);
MedianAbsoluteDeviationDoubleAggregator.combine(state, vValue);
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block quartUncast = page.getBlock(channels.get(0));
if (quartUncast.areAllValuesNull()) {
return;
}
BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector();
assert quart.getPositionCount() == 1;
BytesRef quartScratch = new BytesRef();
MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, quartScratch));
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = MedianAbsoluteDeviationDoubleAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| MedianAbsoluteDeviationDoubleAggregatorFunction |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/util/HttpHeadersUtil.java | {
"start": 1228,
"end": 1361
} | class ____ work with {@link io.micronaut.http.HttpHeaders} or HTTP Headers.
* @author Sergio del Amo
* @since 3.8.0
*/
public final | to |
java | elastic__elasticsearch | test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java | {
"start": 3203,
"end": 23091
} | enum ____ {
NOT_SPECIFIED,
YES,
NO,
MISMATCHED
}
XPackRequired xpackRequired = XPackRequired.NOT_SPECIFIED;
public PrerequisiteSectionBuilder skipIfAwaitsFix(String bugUrl) {
this.skipAwaitsFix = bugUrl;
return this;
}
public PrerequisiteSectionBuilder skipIfVersion(String skipVersionRange) {
this.skipVersionRange = skipVersionRange;
return this;
}
public PrerequisiteSectionBuilder setSkipReason(String skipReason) {
this.skipReason = skipReason;
return this;
}
public PrerequisiteSectionBuilder setRequiresReason(String requiresReason) {
this.requiresReason = requiresReason;
return this;
}
public PrerequisiteSectionBuilder requireYamlRunnerFeature(String featureName) {
requiredYamlRunnerFeatures.add(featureName);
return this;
}
public PrerequisiteSectionBuilder requireXPack() {
if (xpackRequired == XPackRequired.NO) {
xpackRequired = XPackRequired.MISMATCHED;
} else {
xpackRequired = XPackRequired.YES;
}
return this;
}
public PrerequisiteSectionBuilder skipIfXPack() {
if (xpackRequired == XPackRequired.YES) {
xpackRequired = XPackRequired.MISMATCHED;
} else {
xpackRequired = XPackRequired.NO;
}
return this;
}
public PrerequisiteSectionBuilder skipIfClusterFeature(String featureName) {
skipClusterFeatures.add(featureName);
return this;
}
public PrerequisiteSectionBuilder skipKnownIssue(KnownIssue knownIssue) {
skipKnownIssues.add(knownIssue);
return this;
}
public PrerequisiteSectionBuilder skipIfCapabilities(CapabilitiesCheck capabilitiesCheck) {
skipCapabilities.add(capabilitiesCheck);
return this;
}
public PrerequisiteSectionBuilder requireClusterFeature(String featureName) {
requiredClusterFeatures.add(featureName);
return this;
}
public PrerequisiteSectionBuilder requireCapabilities(CapabilitiesCheck capabilitiesCheck) {
requiredCapabilities.add(capabilitiesCheck);
return this;
}
public PrerequisiteSectionBuilder skipIfOs(String osName) {
this.skipOperatingSystems.add(osName);
return this;
}
void validate(XContentLocation contentLocation) {
if ((Strings.isEmpty(skipVersionRange))
&& skipOperatingSystems.isEmpty()
&& skipClusterFeatures.isEmpty()
&& skipCapabilities.isEmpty()
&& skipKnownIssues.isEmpty()
&& Strings.isEmpty(skipAwaitsFix)
&& xpackRequired == XPackRequired.NOT_SPECIFIED
&& requiredYamlRunnerFeatures.isEmpty()
&& requiredCapabilities.isEmpty()
&& requiredClusterFeatures.isEmpty()) {
// TODO separate the validation for requires / skip when dropping parsing of legacy fields, e.g. features in skip
throw new ParsingException(contentLocation, "at least one predicate is mandatory within a skip or requires section");
}
if (Strings.isEmpty(skipReason)
&& (Strings.isEmpty(skipVersionRange)
&& skipOperatingSystems.isEmpty()
&& skipClusterFeatures.isEmpty()
&& skipCapabilities.isEmpty()
&& skipKnownIssues.isEmpty()) == false) {
throw new ParsingException(contentLocation, "reason is mandatory within this skip section");
}
if (Strings.isEmpty(requiresReason) && ((requiredClusterFeatures.isEmpty() && requiredCapabilities.isEmpty()) == false)) {
throw new ParsingException(contentLocation, "reason is mandatory within this requires section");
}
// make feature "skip_os" mandatory if os is given, this is a temporary solution until language client tests know about os
if (skipOperatingSystems.isEmpty() == false && requiredYamlRunnerFeatures.contains("skip_os") == false) {
throw new ParsingException(contentLocation, "if os is specified, test runner feature [skip_os] must be set");
}
if (xpackRequired == XPackRequired.MISMATCHED) {
throw new ParsingException(contentLocation, "either [xpack] or [no_xpack] can be present, not both");
}
if (Sets.haveNonEmptyIntersection(skipClusterFeatures, requiredClusterFeatures)) {
throw new ParsingException(contentLocation, "a cluster feature can be specified either in [requires] or [skip], not both");
}
}
public PrerequisiteSection build() {
if (Features.areAllSupported(requiredYamlRunnerFeatures) == false) {
// always skip this section due to missing required test runner features (see {@link Features})
return new PrerequisiteSection(
emptyList(),
skipReason,
List.of(Prerequisites.FALSE),
requiresReason,
requiredYamlRunnerFeatures
);
}
if (Strings.hasLength(skipAwaitsFix)) {
// always skip this section due to a pending fix
return new PrerequisiteSection(
List.of(Prerequisites.TRUE),
skipReason,
emptyList(),
requiresReason,
requiredYamlRunnerFeatures
);
}
final List<Predicate<ClientYamlTestExecutionContext>> skipCriteriaList = new ArrayList<>();
final List<Predicate<ClientYamlTestExecutionContext>> requiresCriteriaList = new ArrayList<>();
if (xpackRequired == XPackRequired.YES) {
requiresCriteriaList.add(Prerequisites.hasXPack());
}
if (requiredClusterFeatures.isEmpty() == false) {
requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures));
}
if (requiredCapabilities.isEmpty() == false) {
requiresCriteriaList.add(Prerequisites.requireCapabilities(requiredCapabilities));
}
if (xpackRequired == XPackRequired.NO) {
skipCriteriaList.add(Prerequisites.hasXPack());
}
if (Strings.hasLength(skipVersionRange)) {
skipCriteriaList.add(Prerequisites.skipOnVersionRange(skipVersionRange));
}
if (skipOperatingSystems.isEmpty() == false) {
skipCriteriaList.add(Prerequisites.skipOnOsList(skipOperatingSystems));
}
if (skipClusterFeatures.isEmpty() == false) {
skipCriteriaList.add(Prerequisites.skipOnClusterFeatures(skipClusterFeatures));
}
if (skipCapabilities.isEmpty() == false) {
skipCriteriaList.add(Prerequisites.skipCapabilities(skipCapabilities));
}
if (skipKnownIssues.isEmpty() == false) {
skipCriteriaList.add(Prerequisites.skipOnKnownIssue(skipKnownIssues));
}
return new PrerequisiteSection(skipCriteriaList, skipReason, requiresCriteriaList, requiresReason, requiredYamlRunnerFeatures);
}
}
/**
* Parse a {@link PrerequisiteSection} if the next field is {@code skip}, otherwise returns {@link PrerequisiteSection#EMPTY}.
*/
public static PrerequisiteSection parseIfNext(XContentParser parser) throws IOException {
return parseInternal(parser).build();
}
private static void maybeAdvanceToNextField(XContentParser parser) throws IOException {
var token = parser.nextToken();
if (token != null && token != XContentParser.Token.END_ARRAY) {
ParserUtils.advanceToFieldName(parser);
}
}
static PrerequisiteSectionBuilder parseInternal(XContentParser parser) throws IOException {
PrerequisiteSectionBuilder builder = new PrerequisiteSectionBuilder();
var hasPrerequisiteSection = false;
var unknownFieldName = false;
ParserUtils.advanceToFieldName(parser);
while (unknownFieldName == false) {
if ("skip".equals(parser.currentName())) {
parseSkipSection(parser, builder);
hasPrerequisiteSection = true;
maybeAdvanceToNextField(parser);
} else if ("requires".equals(parser.currentName())) {
parseRequiresSection(parser, builder);
hasPrerequisiteSection = true;
maybeAdvanceToNextField(parser);
} else {
unknownFieldName = true;
}
}
if (hasPrerequisiteSection) {
builder.validate(parser.getTokenLocation());
}
return builder;
}
private static void parseFeatureField(String feature, PrerequisiteSectionBuilder builder) {
// #31403 introduced YAML test "features" to indicate if the cluster being tested has xpack installed (`xpack`)
// or if it does *not* have xpack installed (`no_xpack`). These are not test runner features, so now that we have
// "modular" skip criteria let's separate them. Eventually, these should move to their own skip section.
if (feature.equals("xpack")) {
builder.requireXPack();
} else if (feature.equals("no_xpack")) {
builder.skipIfXPack();
} else {
builder.requireYamlRunnerFeature(feature);
}
}
// package private for tests
static void parseSkipSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException {
requireStartObject("skip", parser.nextToken());
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
if (parser.currentToken() == XContentParser.Token.FIELD_NAME) continue;
boolean valid = false;
if (parser.currentToken().isValue()) {
valid = switch (parser.currentName()) {
case "reason" -> parseString(parser, builder::setSkipReason);
case "features" -> parseString(parser, f -> parseFeatureField(f, builder));
case "os" -> parseString(parser, builder::skipIfOs);
case "cluster_features" -> parseString(parser, builder::skipIfClusterFeature);
case "awaits_fix" -> parseString(parser, builder::skipIfAwaitsFix);
default -> false;
};
} else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
valid = switch (parser.currentName()) {
case "features" -> parseStrings(parser, f -> parseFeatureField(f, builder));
case "os" -> parseStrings(parser, builder::skipIfOs);
case "cluster_features" -> parseStrings(parser, builder::skipIfClusterFeature);
case "known_issues" -> parseArray(parser, PrerequisiteSection::parseKnownIssue, builder::skipKnownIssue);
case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::skipIfCapabilities);
default -> false;
};
}
if (valid == false) throwUnexpectedField("skip", parser);
}
parser.nextToken();
}
private static void throwUnexpectedField(String section, XContentParser parser) throws IOException {
throw new ParsingException(
parser.getTokenLocation(),
Strings.format("field [%s] of type [%s] not supported within %s section", parser.currentName(), parser.currentToken(), section)
);
}
private static void requireStartObject(String section, XContentParser.Token token) throws IOException {
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException(
Strings.format(
"Expected [%s], found [%s], the %s section is not properly indented",
XContentParser.Token.START_OBJECT,
token,
section
)
);
}
}
private static boolean parseString(XContentParser parser, Consumer<String> consumer) throws IOException {
consumer.accept(parser.text());
return true;
}
private static boolean parseStrings(XContentParser parser, Consumer<String> consumer) throws IOException {
return parseArray(parser, XContentParser::text, consumer);
}
private static <T> boolean parseArray(XContentParser parser, CheckedFunction<XContentParser, T, IOException> item, Consumer<T> consumer)
throws IOException {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
consumer.accept(item.apply(parser));
}
return true;
}
private static KnownIssue parseKnownIssue(XContentParser parser) throws IOException {
Map<String, String> fields = parser.mapStrings();
if (fields.keySet().equals(KnownIssue.FIELD_NAMES) == false) {
throw new ParsingException(
parser.getTokenLocation(),
Strings.format("Expected all of %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet())
);
}
return new KnownIssue(fields.get("cluster_feature"), fields.get("fixed_by"));
}
private static CapabilitiesCheck parseCapabilities(XContentParser parser) throws IOException {
Map<String, Object> fields = parser.map();
if (CapabilitiesCheck.FIELD_NAMES.containsAll(fields.keySet()) == false) {
throw new ParsingException(
parser.getTokenLocation(),
Strings.format("Expected some of %s, but got %s", CapabilitiesCheck.FIELD_NAMES, fields.keySet())
);
}
Object path = fields.get("path");
if (path == null) {
throw new ParsingException(parser.getTokenLocation(), "path is required");
}
return new CapabilitiesCheck(
ensureString(ensureString(fields.getOrDefault("method", "GET"))),
ensureString(path),
stringArrayAsParamString("parameters", fields),
stringArrayAsParamString("capabilities", fields)
);
}
private static String ensureString(Object obj) {
if (obj instanceof String str) return str;
throw new IllegalArgumentException("Expected STRING, but got: " + obj);
}
private static String stringArrayAsParamString(String name, Map<String, Object> fields) {
Object value = fields.get(name);
if (value == null) return null;
if (value instanceof Collection<?> values) {
return values.stream().map(PrerequisiteSection::ensureString).collect(joining(","));
}
return ensureString(value);
}
static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException {
requireStartObject("requires", parser.nextToken());
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
if (parser.currentToken() == XContentParser.Token.FIELD_NAME) continue;
boolean valid = false;
if (parser.currentToken().isValue()) {
valid = switch (parser.currentName()) {
case "reason" -> parseString(parser, builder::setRequiresReason);
case "test_runner_features" -> parseString(parser, f -> parseFeatureField(f, builder));
case "cluster_features" -> parseString(parser, builder::requireClusterFeature);
default -> false;
};
} else if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
valid = switch (parser.currentName()) {
case "test_runner_features" -> parseStrings(parser, f -> parseFeatureField(f, builder));
case "cluster_features" -> parseStrings(parser, builder::requireClusterFeature);
case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::requireCapabilities);
default -> false;
};
}
if (valid == false) throwUnexpectedField("requires", parser);
}
parser.nextToken();
}
public static final PrerequisiteSection EMPTY = new PrerequisiteSection();
private final List<Predicate<ClientYamlTestExecutionContext>> skipCriteriaList;
private final List<Predicate<ClientYamlTestExecutionContext>> requiresCriteriaList;
private final List<String> yamlRunnerFeatures;
final String skipReason;
final String requireReason;
private PrerequisiteSection() {
this.skipCriteriaList = emptyList();
this.requiresCriteriaList = emptyList();
this.yamlRunnerFeatures = emptyList();
this.skipReason = null;
this.requireReason = null;
}
PrerequisiteSection(
List<Predicate<ClientYamlTestExecutionContext>> skipCriteriaList,
String skipReason,
List<Predicate<ClientYamlTestExecutionContext>> requiresCriteriaList,
String requireReason,
List<String> yamlRunnerFeatures
) {
this.skipCriteriaList = skipCriteriaList;
this.requiresCriteriaList = requiresCriteriaList;
this.yamlRunnerFeatures = yamlRunnerFeatures;
this.skipReason = skipReason;
this.requireReason = requireReason;
}
public boolean hasYamlRunnerFeature(String feature) {
return yamlRunnerFeatures.contains(feature);
}
boolean skipCriteriaMet(ClientYamlTestExecutionContext context) {
return skipCriteriaList.stream().anyMatch(c -> c.test(context));
}
boolean requiresCriteriaMet(ClientYamlTestExecutionContext context) {
return requiresCriteriaList.stream().allMatch(c -> c.test(context));
}
public void evaluate(ClientYamlTestExecutionContext context, String testCandidateDescription) {
if (isEmpty()) {
return;
}
if (requiresCriteriaMet(context) == false) {
throw new AssumptionViolatedException(buildMessage(testCandidateDescription, false));
}
if (skipCriteriaMet(context)) {
throw new AssumptionViolatedException(buildMessage(testCandidateDescription, true));
}
}
boolean isEmpty() {
return skipCriteriaList.isEmpty() && requiresCriteriaList.isEmpty() && yamlRunnerFeatures.isEmpty();
}
String buildMessage(String description, boolean isSkip) {
StringBuilder messageBuilder = new StringBuilder();
messageBuilder.append("[").append(description).append("] skipped,");
var reason = isSkip ? skipReason : requireReason;
if (Strings.isNullOrEmpty(reason) == false) {
messageBuilder.append(" reason: [").append(reason).append("]");
}
if (yamlRunnerFeatures.isEmpty() == false) {
messageBuilder.append(" unsupported features ").append(yamlRunnerFeatures);
}
return messageBuilder.toString();
}
boolean hasCapabilitiesCheck() {
return Stream.concat(skipCriteriaList.stream(), requiresCriteriaList.stream())
.anyMatch(p -> p instanceof Prerequisites.CapabilitiesPredicate);
}
}
| XPackRequired |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/FrameworkExtensionTests.java | {
"start": 3021,
"end": 3846
} | class ____ implements WebTestClientConfigurer {
private final TestRequestPostProcessor requestPostProcessor = new TestRequestPostProcessor();
public TestWebTestClientConfigurer foo(String value) {
this.requestPostProcessor.foo(value);
return this;
}
public TestWebTestClientConfigurer bar(String value) {
this.requestPostProcessor.bar(value);
return this;
}
@Override
public void afterConfigurerAdded(
WebTestClient.Builder builder, WebHttpHandlerBuilder httpHandlerBuilder,
ClientHttpConnector connector) {
if (connector instanceof MockMvcHttpConnector mockMvcConnector) {
builder.clientConnector(mockMvcConnector.with(List.of(this.requestPostProcessor)));
}
}
}
/**
* Test {@code RequestPostProcessor} for custom headers.
*/
private static | TestWebTestClientConfigurer |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/asyncprocessing/operators/AbstractAsyncRunnableStreamOperatorTest.java | {
"start": 28679,
"end": 29559
} | class ____ extends TestOperator {
TestOperatorWithAsyncProcessTimer(
KeySelector<Tuple2<Integer, String>, ?> keySelector, ElementOrder elementOrder) {
super(keySelector, elementOrder);
}
@Override
public void processElement(StreamRecord<Tuple2<Integer, String>> element) throws Exception {
processed.incrementAndGet();
}
@Override
public void onEventTime(InternalTimer<Integer, VoidNamespace> timer) throws Exception {
asyncProcessWithKey(timer.getKey(), () -> super.onEventTime(timer));
}
@Override
public void onProcessingTime(InternalTimer<Integer, VoidNamespace> timer) throws Exception {
asyncProcessWithKey(timer.getKey(), () -> super.onProcessingTime(timer));
}
}
private static | TestOperatorWithAsyncProcessTimer |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ModuleUtils.java | {
"start": 4988,
"end": 5953
} | class ____ to apply; never {@code null}
* @return an immutable list of all such resources found; never {@code null}
* but potentially empty
* @since 1.11
*/
@API(status = INTERNAL, since = "1.11")
public static List<Resource> findAllResourcesInModule(String moduleName, ResourceFilter filter) {
Preconditions.notBlank(moduleName, "Module name must not be null or empty");
Preconditions.notNull(filter, "Resource filter must not be null");
logger.debug(() -> "Looking for resources in module: " + moduleName);
// @formatter:off
Set<ModuleReference> moduleReferences = streamResolvedModules(isEqual(moduleName))
.map(ResolvedModule::reference)
.collect(toSet());
// @formatter:on
return scan(moduleReferences, filter, ModuleUtils.class.getClassLoader());
}
/**
* Find all {@linkplain Resource resources} for the given module.
*
* @param module the module to scan; never {@code null} or <em>empty</em>
* @param filter the | filter |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfigurationHelper.java | {
"start": 2222,
"end": 2312
} | class ____ enum
* @param ignoreUnknown should unknown values be ignored?
* @param <E> | of |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java | {
"start": 46564,
"end": 47267
} | enum ____. **/
public final EnumBuilder<EnumDefault<R>> enumeration(String name) {
return EnumBuilder.create(wrap(new EnumDefault<>(bldr)), names, name);
}
/** Build an Avro record type. **/
public final RecordBuilder<RecordDefault<R>> record(String name) {
return RecordBuilder.create(wrap(new RecordDefault<>(bldr)), names, name);
}
private <C> Completion<C> wrap(Completion<C> completion) {
if (wrapper != null) {
return wrapper.wrap(completion);
}
return completion;
}
}
/**
* FieldTypeBuilder adds {@link #unionOf()}, {@link #nullable()}, and
* {@link #optional()} to BaseFieldTypeBuilder.
**/
public static final | type |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java | {
"start": 8702,
"end": 11451
} | class ____ {
private ExtendedBlock currentBlock;
BlockToWrite(ExtendedBlock block) {
setCurrentBlock(block);
}
synchronized ExtendedBlock getCurrentBlock() {
return currentBlock == null ? null : new ExtendedBlock(currentBlock);
}
synchronized long getNumBytes() {
return currentBlock == null ? 0 : currentBlock.getNumBytes();
}
synchronized void setCurrentBlock(ExtendedBlock block) {
currentBlock = (block == null || block.getLocalBlock() == null) ?
null : new ExtendedBlock(block);
}
synchronized void setNumBytes(long numBytes) {
assert currentBlock != null;
currentBlock.setNumBytes(numBytes);
}
synchronized void setGenerationStamp(long generationStamp) {
assert currentBlock != null;
currentBlock.setGenerationStamp(generationStamp);
}
@Override
public synchronized String toString() {
return currentBlock == null ? "null" : currentBlock.toString();
}
}
/**
* Create a socket for a write pipeline
*
* @param first the first datanode
* @param length the pipeline length
* @param client client
* @return the socket connected to the first datanode
*/
static Socket createSocketForPipeline(final DatanodeInfo first,
final int length, final DFSClient client) throws IOException {
final DfsClientConf conf = client.getConf();
final String dnAddr = first.getXferAddr(conf.isConnectToDnViaHostname());
LOG.debug("Connecting to datanode {}", dnAddr);
final InetSocketAddress isa = NetUtils.createSocketAddr(dnAddr);
final Socket sock = client.socketFactory.createSocket();
final int timeout = client.getDatanodeReadTimeout(length);
NetUtils.connect(sock, isa, client.getRandomLocalInterfaceAddr(),
conf.getSocketTimeout());
sock.setTcpNoDelay(conf.getDataTransferTcpNoDelay());
sock.setSoTimeout(timeout);
sock.setKeepAlive(true);
if (conf.getSocketSendBufferSize() > 0) {
sock.setSendBufferSize(conf.getSocketSendBufferSize());
}
LOG.debug("Send buf size {}", sock.getSendBufferSize());
return sock;
}
/**
* if this file is lazy persist
*
* @param stat the HdfsFileStatus of a file
* @return if this file is lazy persist
*/
static boolean isLazyPersist(HdfsFileStatus stat) {
return stat.getStoragePolicy() == HdfsConstants.MEMORY_STORAGE_POLICY_ID;
}
/**
* release a list of packets to ByteArrayManager
*
* @param packets packets to be release
* @param bam ByteArrayManager
*/
private static void releaseBuffer(List<DFSPacket> packets, ByteArrayManager bam) {
for(DFSPacket p : packets) {
p.releaseBuffer(bam);
}
packets.clear();
}
| BlockToWrite |
java | apache__camel | core/camel-main/src/generated/java/org/apache/camel/main/FaultToleranceConfigurationPropertiesConfigurer.java | {
"start": 715,
"end": 7216
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("BulkheadEnabled", java.lang.Boolean.class);
map.put("BulkheadMaxConcurrentCalls", java.lang.Integer.class);
map.put("BulkheadWaitingTaskQueue", java.lang.Integer.class);
map.put("Delay", java.lang.Long.class);
map.put("FailureRatio", java.lang.Integer.class);
map.put("RequestVolumeThreshold", java.lang.Integer.class);
map.put("SuccessThreshold", java.lang.Integer.class);
map.put("ThreadOffloadExecutorService", java.lang.String.class);
map.put("TimeoutDuration", java.lang.Long.class);
map.put("TimeoutEnabled", java.lang.Boolean.class);
map.put("TimeoutPoolSize", java.lang.Integer.class);
map.put("TypedGuard", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.main.FaultToleranceConfigurationProperties target = (org.apache.camel.main.FaultToleranceConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bulkheadenabled":
case "bulkheadEnabled": target.setBulkheadEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "bulkheadmaxconcurrentcalls":
case "bulkheadMaxConcurrentCalls": target.setBulkheadMaxConcurrentCalls(property(camelContext, java.lang.Integer.class, value)); return true;
case "bulkheadwaitingtaskqueue":
case "bulkheadWaitingTaskQueue": target.setBulkheadWaitingTaskQueue(property(camelContext, java.lang.Integer.class, value)); return true;
case "delay": target.setDelay(property(camelContext, java.lang.Long.class, value)); return true;
case "failureratio":
case "failureRatio": target.setFailureRatio(property(camelContext, java.lang.Integer.class, value)); return true;
case "requestvolumethreshold":
case "requestVolumeThreshold": target.setRequestVolumeThreshold(property(camelContext, java.lang.Integer.class, value)); return true;
case "successthreshold":
case "successThreshold": target.setSuccessThreshold(property(camelContext, java.lang.Integer.class, value)); return true;
case "threadoffloadexecutorservice":
case "threadOffloadExecutorService": target.setThreadOffloadExecutorService(property(camelContext, java.lang.String.class, value)); return true;
case "timeoutduration":
case "timeoutDuration": target.setTimeoutDuration(property(camelContext, java.lang.Long.class, value)); return true;
case "timeoutenabled":
case "timeoutEnabled": target.setTimeoutEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "timeoutpoolsize":
case "timeoutPoolSize": target.setTimeoutPoolSize(property(camelContext, java.lang.Integer.class, value)); return true;
case "typedguard":
case "typedGuard": target.setTypedGuard(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "bulkheadenabled":
case "bulkheadEnabled": return java.lang.Boolean.class;
case "bulkheadmaxconcurrentcalls":
case "bulkheadMaxConcurrentCalls": return java.lang.Integer.class;
case "bulkheadwaitingtaskqueue":
case "bulkheadWaitingTaskQueue": return java.lang.Integer.class;
case "delay": return java.lang.Long.class;
case "failureratio":
case "failureRatio": return java.lang.Integer.class;
case "requestvolumethreshold":
case "requestVolumeThreshold": return java.lang.Integer.class;
case "successthreshold":
case "successThreshold": return java.lang.Integer.class;
case "threadoffloadexecutorservice":
case "threadOffloadExecutorService": return java.lang.String.class;
case "timeoutduration":
case "timeoutDuration": return java.lang.Long.class;
case "timeoutenabled":
case "timeoutEnabled": return java.lang.Boolean.class;
case "timeoutpoolsize":
case "timeoutPoolSize": return java.lang.Integer.class;
case "typedguard":
case "typedGuard": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.main.FaultToleranceConfigurationProperties target = (org.apache.camel.main.FaultToleranceConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bulkheadenabled":
case "bulkheadEnabled": return target.getBulkheadEnabled();
case "bulkheadmaxconcurrentcalls":
case "bulkheadMaxConcurrentCalls": return target.getBulkheadMaxConcurrentCalls();
case "bulkheadwaitingtaskqueue":
case "bulkheadWaitingTaskQueue": return target.getBulkheadWaitingTaskQueue();
case "delay": return target.getDelay();
case "failureratio":
case "failureRatio": return target.getFailureRatio();
case "requestvolumethreshold":
case "requestVolumeThreshold": return target.getRequestVolumeThreshold();
case "successthreshold":
case "successThreshold": return target.getSuccessThreshold();
case "threadoffloadexecutorservice":
case "threadOffloadExecutorService": return target.getThreadOffloadExecutorService();
case "timeoutduration":
case "timeoutDuration": return target.getTimeoutDuration();
case "timeoutenabled":
case "timeoutEnabled": return target.getTimeoutEnabled();
case "timeoutpoolsize":
case "timeoutPoolSize": return target.getTimeoutPoolSize();
case "typedguard":
case "typedGuard": return target.getTypedGuard();
default: return null;
}
}
}
| FaultToleranceConfigurationPropertiesConfigurer |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/spi/CollectionSemantics.java | {
"start": 713,
"end": 1227
} | interface ____ the semantics of some sort of
* persistent collection so that Hibernate understands how to manage the
* lifecycle of instances of that sort of collection.
* <p>
* A collection type with semantics described by a {@code CollectionSemantics}
* object need not be part of the Java Collections Framework.
*
* @param <E> the collection element or map key type
* @param <CE> the type of the collection
*
* @author Steve Ebersole
* @author Gavin King
*
* @since 6.0
*/
@Incubating
public | describes |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/flowable/FlowableEventStream.java | {
"start": 1957,
"end": 2624
} | class ____ implements Consumer<Emitter<Event>> {
private final String type;
private final int numInstances;
EventConsumer(String type, int numInstances) {
this.type = type;
this.numInstances = numInstances;
}
@Override
public void accept(Emitter<Event> s) {
s.onNext(randomEvent(type, numInstances));
try {
// slow it down somewhat
Thread.sleep(50);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
s.onError(e);
}
}
}
public static | EventConsumer |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java | {
"start": 1114,
"end": 1202
} | class ____ extends AbstractCustomJavaToolchainResolver {
| OracleOpenJdkToolchainResolver |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/fieldlocation/FieldLocation_isRoot_Test.java | {
"start": 1078,
"end": 1895
} | class ____ {
@ParameterizedTest(name = "{0}")
@MethodSource
void should_evaluate_object_as_root(FieldLocation fieldLocation) {
assertThat(fieldLocation.isRoot()).isTrue();
}
private static Stream<FieldLocation> should_evaluate_object_as_root() {
return Stream.of(rootFieldLocation(),
new FieldLocation(list("[0]")),
new FieldLocation(list("[1]")));
}
@ParameterizedTest(name = "{0}")
@MethodSource
void should_not_evaluate_object_as_root(FieldLocation fieldLocation) {
assertThat(fieldLocation.isRoot()).isFalse();
}
private static Stream<FieldLocation> should_not_evaluate_object_as_root() {
return Stream.of(new FieldLocation(list("[0]", "name")),
new FieldLocation(list("name")));
}
}
| FieldLocation_isRoot_Test |
java | grpc__grpc-java | gcp-observability/src/test/java/io/grpc/gcp/observability/TracesTest.java | {
"start": 2070,
"end": 3463
} | class ____ {
@ClassRule
public static final GrpcCleanupRule cleanupRule = new GrpcCleanupRule();
private static final String PROJECT_ID = "PROJECT";
private static final String CUSTOM_TAG_KEY = "service";
private static final String CUSTOM_TAG_VALUE =
String.format("payment-%s", String.valueOf(System.currentTimeMillis()));
private static final Map<String, String> CUSTOM_TAGS =
Collections.singletonMap(CUSTOM_TAG_KEY, CUSTOM_TAG_VALUE);
private final StaticTestingClassLoader classLoader =
new StaticTestingClassLoader(getClass().getClassLoader(),
Pattern.compile("io\\.grpc\\..*|io\\.opencensus\\..*"));
/**
* End to end cloud trace test.
*
* <p>Ignoring test, because it calls external Cloud Tracing APIs. To test cloud trace setup
* locally,
* 1. Set up Cloud auth credentials
* 2. Assign permissions to service account to write traces to project specified by variable
* PROJECT_ID
* 3. Comment @Ignore annotation
* 4. This test is expected to pass when ran with above setup. This has been verified manually.
*/
@Ignore
@Test
public void testTracesExporter() throws Exception {
Class<?> runnable =
classLoader.loadClass(TracesTest.StaticTestingClassTestTracesExporter.class.getName());
((Runnable) runnable.getDeclaredConstructor().newInstance()).run();
}
public static final | TracesTest |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/format/datetime/DateFormatter.java | {
"start": 1882,
"end": 9718
} | class ____ implements Formatter<Date> {
private static final TimeZone UTC = TimeZone.getTimeZone("UTC");
private static final Map<ISO, String> ISO_PATTERNS;
private static final Map<ISO, String> ISO_FALLBACK_PATTERNS;
static {
// We use an EnumMap instead of Map.of(...) since the former provides better performance.
Map<ISO, String> formats = new EnumMap<>(ISO.class);
formats.put(ISO.DATE, "yyyy-MM-dd");
formats.put(ISO.TIME, "HH:mm:ss.SSSXXX");
formats.put(ISO.DATE_TIME, "yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
ISO_PATTERNS = Collections.unmodifiableMap(formats);
// Fallback format for the time part without milliseconds.
Map<ISO, String> fallbackFormats = new EnumMap<>(ISO.class);
fallbackFormats.put(ISO.TIME, "HH:mm:ssXXX");
fallbackFormats.put(ISO.DATE_TIME, "yyyy-MM-dd'T'HH:mm:ssXXX");
ISO_FALLBACK_PATTERNS = Collections.unmodifiableMap(fallbackFormats);
}
private @Nullable Object source;
private @Nullable String pattern;
private String @Nullable [] fallbackPatterns;
private int style = DateFormat.DEFAULT;
private @Nullable String stylePattern;
private @Nullable ISO iso;
private @Nullable TimeZone timeZone;
private boolean lenient = false;
/**
* Create a new default {@code DateFormatter}.
*/
public DateFormatter() {
}
/**
* Create a new {@code DateFormatter} for the given date time pattern.
*/
public DateFormatter(String pattern) {
this.pattern = pattern;
}
/**
* Set the source of the configuration for this {@code DateFormatter} —
* for example, an instance of the {@link DateTimeFormat @DateTimeFormat}
* annotation if such an annotation was used to configure this {@code DateFormatter}.
* <p>The supplied source object will only be used for descriptive purposes
* by invoking its {@code toString()} method — for example, when
* generating an exception message to provide further context.
* @param source the source of the configuration
* @since 5.3.5
*/
public void setSource(Object source) {
this.source = source;
}
/**
* Set the pattern to use to format date values.
* <p>If not specified, DateFormat's default style will be used.
*/
public void setPattern(String pattern) {
this.pattern = pattern;
}
/**
* Set additional patterns to use as a fallback in case parsing fails for the
* configured {@linkplain #setPattern pattern}, {@linkplain #setIso ISO format},
* {@linkplain #setStyle style}, or {@linkplain #setStylePattern style pattern}.
* @param fallbackPatterns the fallback parsing patterns
* @since 5.3.5
* @see DateTimeFormat#fallbackPatterns()
*/
public void setFallbackPatterns(String... fallbackPatterns) {
this.fallbackPatterns = fallbackPatterns;
}
/**
* Set the ISO format to use to format date values.
* @param iso the {@link ISO} format
* @since 3.2
*/
public void setIso(ISO iso) {
this.iso = iso;
}
/**
* Set the {@link DateFormat} style to use to format date values.
* <p>If not specified, DateFormat's default style will be used.
* @see DateFormat#DEFAULT
* @see DateFormat#SHORT
* @see DateFormat#MEDIUM
* @see DateFormat#LONG
* @see DateFormat#FULL
*/
public void setStyle(int style) {
this.style = style;
}
/**
* Set the two characters to use to format date values.
* <p>The first character is used for the date style; the second is used for
* the time style.
* <p>Supported characters:
* <ul>
* <li>'S' = Small</li>
* <li>'M' = Medium</li>
* <li>'L' = Long</li>
* <li>'F' = Full</li>
* <li>'-' = Omitted</li>
* </ul>
* @param stylePattern two characters from the set {"S", "M", "L", "F", "-"}
* @since 3.2
*/
public void setStylePattern(String stylePattern) {
this.stylePattern = stylePattern;
}
/**
* Set the {@link TimeZone} to normalize the date values into, if any.
*/
public void setTimeZone(TimeZone timeZone) {
this.timeZone = timeZone;
}
/**
* Specify whether parsing is to be lenient. Default is {@code false}.
* <p>With lenient parsing, the parser may allow inputs that do not precisely match the format.
* With strict parsing, inputs must match the format exactly.
*/
public void setLenient(boolean lenient) {
this.lenient = lenient;
}
@Override
public String print(Date date, Locale locale) {
return getDateFormat(locale).format(date);
}
@Override
public Date parse(String text, Locale locale) throws ParseException {
try {
return getDateFormat(locale).parse(text);
}
catch (ParseException ex) {
Set<String> fallbackPatterns = new LinkedHashSet<>();
String isoPattern = ISO_FALLBACK_PATTERNS.get(this.iso);
if (isoPattern != null) {
fallbackPatterns.add(isoPattern);
}
if (!ObjectUtils.isEmpty(this.fallbackPatterns)) {
Collections.addAll(fallbackPatterns, this.fallbackPatterns);
}
if (!fallbackPatterns.isEmpty()) {
for (String pattern : fallbackPatterns) {
try {
DateFormat dateFormat = configureDateFormat(new SimpleDateFormat(pattern, locale));
// Align timezone for parsing format with printing format if ISO is set.
if (this.iso != null && this.iso != ISO.NONE) {
dateFormat.setTimeZone(UTC);
}
return dateFormat.parse(text);
}
catch (ParseException ignoredException) {
// Ignore fallback parsing exceptions since the exception thrown below
// will include information from the "source" if available -- for example,
// the toString() of a @DateTimeFormat annotation.
}
}
}
if (this.source != null) {
ParseException parseException = new ParseException(
String.format("Unable to parse date time value \"%s\" using configuration from %s", text, this.source),
ex.getErrorOffset());
parseException.initCause(ex);
throw parseException;
}
// else rethrow original exception
throw ex;
}
}
protected DateFormat getDateFormat(Locale locale) {
return configureDateFormat(createDateFormat(locale));
}
private DateFormat configureDateFormat(DateFormat dateFormat) {
if (this.timeZone != null) {
dateFormat.setTimeZone(this.timeZone);
}
dateFormat.setLenient(this.lenient);
return dateFormat;
}
private DateFormat createDateFormat(Locale locale) {
if (StringUtils.hasLength(this.pattern)) {
return new SimpleDateFormat(this.pattern, locale);
}
if (this.iso != null && this.iso != ISO.NONE) {
String pattern = ISO_PATTERNS.get(this.iso);
if (pattern == null) {
throw new IllegalStateException("Unsupported ISO format " + this.iso);
}
SimpleDateFormat format = new SimpleDateFormat(pattern);
format.setTimeZone(UTC);
return format;
}
if (StringUtils.hasLength(this.stylePattern)) {
int dateStyle = getStylePatternForChar(0);
int timeStyle = getStylePatternForChar(1);
if (dateStyle != -1 && timeStyle != -1) {
return DateFormat.getDateTimeInstance(dateStyle, timeStyle, locale);
}
if (dateStyle != -1) {
return DateFormat.getDateInstance(dateStyle, locale);
}
if (timeStyle != -1) {
return DateFormat.getTimeInstance(timeStyle, locale);
}
throw unsupportedStylePatternException();
}
return DateFormat.getDateInstance(this.style, locale);
}
private int getStylePatternForChar(int index) {
if (this.stylePattern != null && this.stylePattern.length() > index) {
char ch = this.stylePattern.charAt(index);
return switch (ch) {
case 'S' -> DateFormat.SHORT;
case 'M' -> DateFormat.MEDIUM;
case 'L' -> DateFormat.LONG;
case 'F' -> DateFormat.FULL;
case '-' -> -1;
default -> throw unsupportedStylePatternException();
};
}
throw unsupportedStylePatternException();
}
private IllegalStateException unsupportedStylePatternException() {
return new IllegalStateException("Unsupported style pattern '" + this.stylePattern + "'");
}
}
| DateFormatter |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/jdk/JDKCoreSerializers.java | {
"start": 545,
"end": 2041
} | class ____, and keep things simple and efficient.
*/
protected final static HashMap<String, ValueSerializer<?>> _concrete;
static {
HashMap<String, ValueSerializer<?>> concrete = new HashMap<>();
// String and string-like types (note: date types explicitly
// not included -- can use either textual or numeric serialization)
concrete.put(String.class.getName(), StringSerializer.instance);
final ToStringSerializer sls = ToStringSerializer.instance;
concrete.put(StringBuffer.class.getName(), sls);
concrete.put(StringBuilder.class.getName(), sls);
concrete.put(Character.class.getName(), sls);
concrete.put(Character.TYPE.getName(), sls);
// Primitives/wrappers for primitives (primitives needed for Beans)
NumberSerializers.addAll(concrete);
concrete.put(Boolean.TYPE.getName(), new BooleanSerializer(true));
concrete.put(Boolean.class.getName(), new BooleanSerializer(false));
// Other numbers, more complicated
concrete.put(BigInteger.class.getName(), new NumberSerializer(BigInteger.class));
concrete.put(BigDecimal.class.getName(), new NumberSerializer(BigDecimal.class));
_concrete = concrete;
}
/**
* Method called by {@link BasicSerializerFactory} to find one of serializers provided here.
*/
public static final ValueSerializer<?> find(Class<?> raw)
{
return _concrete.get(raw.getName());
}
}
| name |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/PreemptionResourceRequestPBImpl.java | {
"start": 1445,
"end": 3824
} | class ____ extends PreemptionResourceRequest {
PreemptionResourceRequestProto proto =
PreemptionResourceRequestProto.getDefaultInstance();
PreemptionResourceRequestProto.Builder builder = null;
boolean viaProto = false;
private ResourceRequest rr;
public PreemptionResourceRequestPBImpl() {
builder = PreemptionResourceRequestProto.newBuilder();
}
public PreemptionResourceRequestPBImpl(PreemptionResourceRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public synchronized PreemptionResourceRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void mergeLocalToBuilder() {
if (rr != null) {
builder.setResource(convertToProtoFormat(rr));
}
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = PreemptionResourceRequestProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public synchronized ResourceRequest getResourceRequest() {
PreemptionResourceRequestProtoOrBuilder p = viaProto ? proto : builder;
if (rr != null) {
return rr;
}
if (!p.hasResource()) {
return null;
}
rr = convertFromProtoFormat(p.getResource());
return rr;
}
@Override
public synchronized void setResourceRequest(final ResourceRequest rr) {
maybeInitBuilder();
if (null == rr) {
builder.clearResource();
}
this.rr = rr;
}
private ResourceRequestPBImpl convertFromProtoFormat(ResourceRequestProto p) {
return new ResourceRequestPBImpl(p);
}
private ResourceRequestProto convertToProtoFormat(ResourceRequest t) {
return ((ResourceRequestPBImpl)t).getProto();
}
}
| PreemptionResourceRequestPBImpl |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/HashtableContainsTest.java | {
"start": 1260,
"end": 1727
} | class ____ {
void f(ConcurrentHashMap<String, Integer> m, Integer v) {
// BUG: Diagnostic contains: containsValue(v)
m.contains(v);
}
}
""")
.doTest();
}
@Test
public void positive_hashtable() {
compilationHelper
.addSourceLines(
"test/Test.java",
"""
package test;
import java.util.Hashtable;
| Test |
java | apache__dubbo | dubbo-plugin/dubbo-triple-websocket/src/main/java/org/apache/dubbo/rpc/protocol/tri/websocket/WebSocketConstants.java | {
"start": 863,
"end": 1105
} | interface ____ {
String TRIPLE_WEBSOCKET_UPGRADE_HEADER_VALUE = "websocket";
String TRIPLE_WEBSOCKET_REMOTE_ADDRESS = "tri.websocket.remote.address";
String TRIPLE_WEBSOCKET_LISTENER = "tri.websocket.listener";
}
| WebSocketConstants |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/maps/Maps_assertDoesNotContainKey_Test.java | {
"start": 1966,
"end": 6479
} | class ____ extends MapsBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
String key = "name";
// WHEN
var assertionError = expectAssertionError(() -> maps.assertDoesNotContainKey(INFO, null, key));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@ParameterizedTest
@MethodSource({
"unmodifiableMapsSuccessfulTestCases",
"modifiableMapsSuccessfulTestCases",
"caseInsensitiveMapsSuccessfulTestCases",
})
void should_pass(Map<String, String> actual, String expected) {
// WHEN/THEN
assertThatNoException().as(actual.getClass().getName())
.isThrownBy(() -> maps.assertDoesNotContainKey(info, actual, expected));
}
private static Stream<Arguments> unmodifiableMapsSuccessfulTestCases() {
return Stream.of(arguments(emptyMap(), "name"),
arguments(singletonMap("name", "Yoda"), "color"),
arguments(new SingletonMap<>("name", "Yoda"), "color"),
arguments(unmodifiableMap(mapOf(entry("name", "Yoda"), entry("job", "Jedi"))), "color"),
arguments(ImmutableMap.of("name", "Yoda", "job", "Jedi"), "color"),
arguments(Map.of("name", "Yoda", "job", "Jedi"), "color"),
// implementation not permitting null keys
arguments(Map.of("name", "Yoda"), null));
}
private static Stream<Arguments> modifiableMapsSuccessfulTestCases() {
return Stream.of(MODIFIABLE_MAPS)
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("name", "Yoda")),
"color"),
arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")),
"color")));
}
private static Stream<Arguments> caseInsensitiveMapsSuccessfulTestCases() {
return Stream.of(ArrayUtils.add(CASE_INSENSITIVE_MAPS, CaseInsensitiveMap::new))
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
"color"),
arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
"Color")));
}
@ParameterizedTest
@MethodSource({
"unmodifiableMapsFailureTestCases",
"modifiableMapsFailureTestCases",
"caseInsensitiveMapsFailureTestCases",
})
void should_fail(Map<String, String> actual, String expected) {
// WHEN
assertThatExceptionOfType(AssertionError.class).as(actual.getClass().getName())
.isThrownBy(() -> maps.assertDoesNotContainKey(info, actual, expected))
// THEN
.withMessage(shouldNotContainKey(actual, expected).create());
}
private static Stream<Arguments> unmodifiableMapsFailureTestCases() {
return Stream.of(arguments(singletonMap("name", "Yoda"), "name"),
arguments(new SingletonMap<>("name", "Yoda"), "name"),
arguments(unmodifiableMap(mapOf(entry("name", "Yoda"), entry("job", "Jedi"))), "name"),
arguments(ImmutableMap.of("name", "Yoda", "job", "Jedi"), "name"),
arguments(Map.of("name", "Yoda", "job", "Jedi"), "name"));
}
private static Stream<Arguments> modifiableMapsFailureTestCases() {
return Stream.of(MODIFIABLE_MAPS)
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")), "name"),
arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")), "job")));
}
private static Stream<Arguments> caseInsensitiveMapsFailureTestCases() {
return Stream.of(ArrayUtils.add(CASE_INSENSITIVE_MAPS, CaseInsensitiveMap::new))
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
"name"),
arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
"Name")));
}
}
| Maps_assertDoesNotContainKey_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/detached/Several.java | {
"start": 433,
"end": 566
} | class ____ {
@GeneratedValue
@Id
long id;
@ManyToMany(cascade = {CascadeType.PERSIST, CascadeType.MERGE})
Set<Many> many;
}
| Several |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ClusterNodeTracker.java | {
"start": 2106,
"end": 15357
} | class ____<N extends SchedulerNode> {
private static final Logger LOG =
LoggerFactory.getLogger(ClusterNodeTracker.class);
private ReadWriteLock readWriteLock = new ReentrantReadWriteLock(true);
private Lock readLock = readWriteLock.readLock();
private Lock writeLock = readWriteLock.writeLock();
private HashMap<NodeId, N> nodes = new HashMap<>();
private Map<String, N> nodeNameToNodeMap = new HashMap<>();
private Map<String, List<N>> nodesPerRack = new HashMap<>();
private Map<String, List<N>> nodesPerLabel = new HashMap<>();
private Resource clusterCapacity = Resources.createResource(0, 0);
private volatile Resource staleClusterCapacity =
Resources.clone(Resources.none());
// Max allocation
private final long[] maxAllocation;
private Resource configuredMaxAllocation;
private boolean forceConfiguredMaxAllocation = true;
private long configuredMaxAllocationWaitTime;
private boolean reportedMaxAllocation = false;
public ClusterNodeTracker() {
maxAllocation = new long[ResourceUtils.getNumberOfCountableResourceTypes()];
Arrays.fill(maxAllocation, -1);
}
public void addNode(N node) {
writeLock.lock();
try {
nodes.put(node.getNodeID(), node);
nodeNameToNodeMap.put(node.getNodeName(), node);
List<N> nodesPerLabels = nodesPerLabel.get(node.getPartition());
if (nodesPerLabels == null) {
nodesPerLabels = new ArrayList<N>();
}
nodesPerLabels.add(node);
// Update new set of nodes for given partition.
nodesPerLabel.put(node.getPartition(), nodesPerLabels);
// Update nodes per rack as well
String rackName = node.getRackName();
List<N> nodesList = nodesPerRack.get(rackName);
if (nodesList == null) {
nodesList = new ArrayList<>();
nodesPerRack.put(rackName, nodesList);
}
nodesList.add(node);
// Update cluster capacity
Resources.addTo(clusterCapacity, node.getTotalResource());
staleClusterCapacity = Resources.clone(clusterCapacity);
ClusterMetrics.getMetrics().incrCapability(node.getTotalResource());
// Update maximumAllocation
updateMaxResources(node, true);
} finally {
writeLock.unlock();
}
}
public boolean exists(NodeId nodeId) {
readLock.lock();
try {
return nodes.containsKey(nodeId);
} finally {
readLock.unlock();
}
}
public N getNode(NodeId nodeId) {
readLock.lock();
try {
return nodes.get(nodeId);
} finally {
readLock.unlock();
}
}
public SchedulerNodeReport getNodeReport(NodeId nodeId) {
readLock.lock();
try {
N n = nodes.get(nodeId);
return n == null ? null : new SchedulerNodeReport(n);
} finally {
readLock.unlock();
}
}
public int nodeCount() {
readLock.lock();
try {
return nodes.size();
} finally {
readLock.unlock();
}
}
public int nodeCount(String rackName) {
readLock.lock();
String rName = rackName == null ? "NULL" : rackName;
try {
List<N> nodesList = nodesPerRack.get(rName);
return nodesList == null ? 0 : nodesList.size();
} finally {
readLock.unlock();
}
}
public Resource getClusterCapacity() {
return staleClusterCapacity;
}
public N removeNode(NodeId nodeId) {
writeLock.lock();
try {
N node = nodes.remove(nodeId);
if (node == null) {
LOG.warn("Attempting to remove a non-existent node " + nodeId);
return null;
}
nodeNameToNodeMap.remove(node.getNodeName());
// Update nodes per rack as well
String rackName = node.getRackName();
List<N> nodesList = nodesPerRack.get(rackName);
if (nodesList == null) {
LOG.error("Attempting to remove node from an empty rack " + rackName);
} else {
nodesList.remove(node);
if (nodesList.isEmpty()) {
nodesPerRack.remove(rackName);
}
}
List<N> nodesPerPartition = nodesPerLabel.get(node.getPartition());
nodesPerPartition.remove(node);
// Update new set of nodes for given partition.
if (nodesPerPartition.isEmpty()) {
nodesPerLabel.remove(node.getPartition());
} else {
nodesPerLabel.put(node.getPartition(), nodesPerPartition);
}
// Update cluster capacity
Resources.subtractFrom(clusterCapacity, node.getTotalResource());
staleClusterCapacity = Resources.clone(clusterCapacity);
ClusterMetrics.getMetrics().decrCapability(node.getTotalResource());
// Update maximumAllocation
updateMaxResources(node, false);
return node;
} finally {
writeLock.unlock();
}
}
public void setConfiguredMaxAllocation(Resource resource) {
writeLock.lock();
try {
configuredMaxAllocation = Resources.clone(resource);
} finally {
writeLock.unlock();
}
}
public void setConfiguredMaxAllocationWaitTime(
long configuredMaxAllocationWaitTime) {
writeLock.lock();
try {
this.configuredMaxAllocationWaitTime =
configuredMaxAllocationWaitTime;
} finally {
writeLock.unlock();
}
}
  /**
   * Get the maximum resource that may be allocated in a single request.
   * Until {@code configuredMaxAllocationWaitTime} has elapsed since cluster
   * start (or while no node has reported), the configured maximum is returned
   * unconditionally; afterwards the configured maximum is capped, per
   * resource type, by the largest value any node has reported.
   *
   * @return the effective maximum allocation
   */
  public Resource getMaxAllowedAllocation() {
    readLock.lock();
    try {
      if (forceConfiguredMaxAllocation &&
          System.currentTimeMillis() - ResourceManager.getClusterTimeStamp()
              > configuredMaxAllocationWaitTime) {
        // NOTE(review): this writes a field while holding only the read lock.
        // The flip is one-way (true -> false), so concurrent readers racing
        // here is presumably benign -- confirm before relying on it.
        forceConfiguredMaxAllocation = false;
      }

      if (forceConfiguredMaxAllocation || !reportedMaxAllocation) {
        return configuredMaxAllocation;
      }

      // Cap each resource type at the maximum any node has reported.
      Resource ret = Resources.clone(configuredMaxAllocation);
      for (int i = 0; i < maxAllocation.length; i++) {
        ResourceInformation info = ret.getResourceInformation(i);
        if (info.getValue() > maxAllocation[i]) {
          info.setValue(maxAllocation[i]);
        }
      }

      return ret;
    } finally {
      readLock.unlock();
    }
  }
@VisibleForTesting
public void setForceConfiguredMaxAllocation(boolean flag) {
writeLock.lock();
try {
forceConfiguredMaxAllocation = flag;
} finally {
writeLock.unlock();
}
}
  /**
   * Update the per-resource-type maximum allocation after a node is added or
   * removed.
   *
   * On add, each resource type's maximum is raised to the node's value if
   * larger. On remove, any maximum that exactly matched the removed node is
   * invalidated and the maxima are rebuilt by re-adding every remaining node.
   *
   * @param node the node that was added or removed
   * @param add true if the node was added, false if it was removed
   */
  private void updateMaxResources(SchedulerNode node, boolean add) {
    Resource totalResource = node.getTotalResource();
    ResourceInformation[] totalResources;

    if (totalResource != null) {
      totalResources = totalResource.getResources();
    } else {
      // A null total resource indicates a bug elsewhere; log and bail out
      // rather than NPE below.
      LOG.warn(node.getNodeName() + " reported in with null resources, which "
          + "indicates a problem in the source code. Please file an issue at "
          + "https://issues.apache.org/jira/secure/CreateIssue!default.jspa");

      return;
    }

    writeLock.lock();
    try {
      if (add) { // added node
        // If we add a node, we must have a max allocation for all resource
        // types
        reportedMaxAllocation = true;

        for (int i = 0; i < maxAllocation.length; i++) {
          long value = totalResources[i].getValue();
          if (value > maxAllocation[i]) {
            maxAllocation[i] = value;
          }
        }
      } else { // removed node
        boolean recalculate = false;

        for (int i = 0; i < maxAllocation.length; i++) {
          if (totalResources[i].getValue() == maxAllocation[i]) {
            // No need to set reportedMaxAllocation to false here because we
            // will recalculate before we release the lock.
            maxAllocation[i] = -1;
            recalculate = true;
          }
        }

        // We only have to iterate through the nodes if the current max memory
        // or vcores was equal to the removed node's
        if (recalculate) {
          // Treat it like an empty cluster and add nodes. The recursive call
          // re-acquires the write lock, which is safe because the caller's
          // thread already holds it (reentrant lock).
          reportedMaxAllocation = false;
          nodes.values().forEach(n -> updateMaxResources(n, true));
        }
      }
    } finally {
      writeLock.unlock();
    }
  }
  /**
   * Get a snapshot list of every tracked node (no filtering).
   *
   * @return a new list containing all tracked nodes
   */
  public List<N> getAllNodes() {
    return getNodes(null);
  }
/**
* Convenience method to filter nodes based on a condition.
*
* @param nodeFilter A {@link NodeFilter} for filtering the nodes
* @return A list of filtered nodes
*/
public List<N> getNodes(NodeFilter nodeFilter) {
List<N> nodeList = new ArrayList<>();
readLock.lock();
try {
if (nodeFilter == null) {
nodeList.addAll(nodes.values());
} else {
for (N node : nodes.values()) {
if (nodeFilter.accept(node)) {
nodeList.add(node);
}
}
}
} finally {
readLock.unlock();
}
return nodeList;
}
  /**
   * Get a snapshot list of the ids of every tracked node (no filtering).
   *
   * @return a new list containing all tracked node ids
   */
  public List<NodeId> getAllNodeIds() {
    return getNodeIds(null);
  }
/**
* Convenience method to filter nodes based on a condition.
*
* @param nodeFilter A {@link NodeFilter} for filtering the nodes
* @return A list of filtered nodes
*/
public List<NodeId> getNodeIds(NodeFilter nodeFilter) {
List<NodeId> nodeList = new ArrayList<>();
readLock.lock();
try {
if (nodeFilter == null) {
for (N node : nodes.values()) {
nodeList.add(node.getNodeID());
}
} else {
for (N node : nodes.values()) {
if (nodeFilter.accept(node)) {
nodeList.add(node.getNodeID());
}
}
}
} finally {
readLock.unlock();
}
return nodeList;
}
/**
* Convenience method to sort nodes.
* Nodes can change while being sorted. Using a standard sort will fail
* without locking each node, the TreeSet handles this without locks.
*
* @param comparator the comparator to sort the nodes with
* @return sorted set of nodes in the form of a TreeSet
*/
public TreeSet<N> sortedNodeSet(Comparator<N> comparator) {
TreeSet<N> sortedSet = new TreeSet<>(comparator);
readLock.lock();
try {
sortedSet.addAll(nodes.values());
} finally {
readLock.unlock();
}
return sortedSet;
}
/**
* Convenience method to return list of nodes corresponding to resourceName
* passed in the {@link ResourceRequest}.
*
* @param resourceName Host/rack name of the resource, or
* {@link ResourceRequest#ANY}
* @return list of nodes that match the resourceName
*/
public List<N> getNodesByResourceName(final String resourceName) {
Preconditions.checkArgument(
resourceName != null && !resourceName.isEmpty());
List<N> retNodes = new ArrayList<>();
if (ResourceRequest.ANY.equals(resourceName)) {
retNodes.addAll(getAllNodes());
} else if (nodeNameToNodeMap.containsKey(resourceName)) {
retNodes.add(nodeNameToNodeMap.get(resourceName));
} else if (nodesPerRack.containsKey(resourceName)) {
retNodes.addAll(nodesPerRack.get(resourceName));
} else {
LOG.info(
"Could not find a node matching given resourceName " + resourceName);
}
return retNodes;
}
/**
* Convenience method to return list of {@link NodeId} corresponding to
* resourceName passed in the {@link ResourceRequest}.
*
* @param resourceName Host/rack name of the resource, or
* {@link ResourceRequest#ANY}
* @return list of {@link NodeId} that match the resourceName
*/
public List<NodeId> getNodeIdsByResourceName(final String resourceName) {
Preconditions.checkArgument(
resourceName != null && !resourceName.isEmpty());
List<NodeId> retNodes = new ArrayList<>();
if (ResourceRequest.ANY.equals(resourceName)) {
retNodes.addAll(getAllNodeIds());
} else if (nodeNameToNodeMap.containsKey(resourceName)) {
retNodes.add(nodeNameToNodeMap.get(resourceName).getNodeID());
} else if (nodesPerRack.containsKey(resourceName)) {
for (N node : nodesPerRack.get(resourceName)) {
retNodes.add(node.getNodeID());
}
} else {
LOG.info(
"Could not find a node matching given resourceName " + resourceName);
}
return retNodes;
}
/**
* update cached nodes per partition on a node label change event.
* @param partition nodeLabel
* @param nodeIds List of Node IDs
*/
public void updateNodesPerPartition(String partition, Set<NodeId> nodeIds) {
writeLock.lock();
try {
// Clear all entries.
nodesPerLabel.remove(partition);
List<N> nodesPerPartition = new ArrayList<N>();
for (NodeId nodeId : nodeIds) {
N n = getNode(nodeId);
if (n != null) {
nodesPerPartition.add(n);
}
}
// Update new set of nodes for given partition.
nodesPerLabel.put(partition, nodesPerPartition);
} finally {
writeLock.unlock();
}
}
public List<N> getNodesPerPartition(String partition) {
List<N> nodesPerPartition = null;
readLock.lock();
try {
if (nodesPerLabel.containsKey(partition)) {
nodesPerPartition = new ArrayList<N>(nodesPerLabel.get(partition));
}
} finally {
readLock.unlock();
}
return nodesPerPartition;
}
public List<String> getPartitions() {
List<String> partitions = null;
readLock.lock();
try {
partitions = new ArrayList(nodesPerLabel.keySet());
} finally {
readLock.unlock();
}
return partitions;
}
} | ClusterNodeTracker |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/testservices/NullBindLaunchableService.java | {
"start": 1080,
"end": 1555
} | class ____ extends LaunchableRunningService {
public static final String NAME =
"org.apache.hadoop.service.launcher.testservices.NullBindLaunchableService";
public NullBindLaunchableService() {
this("NullBindLaunchableService");
}
public NullBindLaunchableService(String name) {
super(name);
}
@Override
public Configuration bindArgs(Configuration config, List<String> args)
throws Exception {
return null;
}
}
| NullBindLaunchableService |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/RoutingSlip.java | {
"start": 22696,
"end": 23149
} | class ____ extends AsyncProcessorSupport {
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
AsyncProcessor producer = exchange.getProperty(ExchangePropertyKey.SLIP_PRODUCER, AsyncProcessor.class);
return producer.process(exchange, callback);
}
@Override
public String toString() {
return "RoutingSlipProcessor";
}
}
}
| RoutingSlipProcessor |
java | apache__spark | common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java | {
"start": 2296,
"end": 14351
} | class ____ {
private static final TransportConf conf = new TransportConf("shuffle", MapConfigProvider.EMPTY);
@Test
public void testFetchOne() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new byte[0])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new FetchShuffleBlocks("app-id", "exec-id", 0, new long[] { 0 }, new int[][] {{ 0 }}, false),
conf);
verify(listener).onBlockFetchSuccess("shuffle_0_0_0", blocks.get("shuffle_0_0_0"));
}
@Test
public void testUseOldProtocol() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new byte[0])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new OpenBlocks("app-id", "exec-id", blockIds),
new TransportConf("shuffle", new MapConfigProvider(
new HashMap<String, String>() {{
put("spark.shuffle.useOldFetchProtocol", "true");
}}
)));
verify(listener).onBlockFetchSuccess("shuffle_0_0_0", blocks.get("shuffle_0_0_0"));
}
@Test
public void testFetchThreeShuffleBlocks() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
blocks.put("shuffle_0_0_1", new NioManagedBuffer(ByteBuffer.wrap(new byte[23])));
blocks.put("shuffle_0_0_2", new NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[23])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new FetchShuffleBlocks(
"app-id", "exec-id", 0, new long[] { 0 }, new int[][] {{ 0, 1, 2 }}, false),
conf);
for (int i = 0; i < 3; i ++) {
verify(listener, times(1)).onBlockFetchSuccess(
"shuffle_0_0_" + i, blocks.get("shuffle_0_0_" + i));
}
}
@Test
public void testBatchFetchThreeShuffleBlocks() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffle_0_0_0_3", new NioManagedBuffer(ByteBuffer.wrap(new byte[58])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new FetchShuffleBlocks(
"app-id", "exec-id", 0, new long[] { 0 }, new int[][] {{ 0, 3 }}, true),
conf);
verify(listener, times(1)).onBlockFetchSuccess(
"shuffle_0_0_0_3", blocks.get("shuffle_0_0_0_3"));
}
@Test
public void testFetchThree() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("b0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
blocks.put("b1", new NioManagedBuffer(ByteBuffer.wrap(new byte[23])));
blocks.put("b2", new NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[23])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new OpenBlocks("app-id", "exec-id", blockIds),
conf);
for (int i = 0; i < 3; i ++) {
verify(listener, times(1)).onBlockFetchSuccess("b" + i, blocks.get("b" + i));
}
}
@Test
public void testFailure() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("b0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
blocks.put("b1", null);
blocks.put("b2", null);
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new OpenBlocks("app-id", "exec-id", blockIds),
conf);
// Each failure will cause a failure to be invoked in all remaining block fetches.
verify(listener, times(1)).onBlockFetchSuccess("b0", blocks.get("b0"));
verify(listener, times(1)).onBlockFetchFailure(eq("b1"), any());
verify(listener, times(2)).onBlockFetchFailure(eq("b2"), any());
}
@Test
public void testFailureAndSuccess() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("b0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
blocks.put("b1", null);
blocks.put("b2", new NioManagedBuffer(ByteBuffer.wrap(new byte[21])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new OpenBlocks("app-id", "exec-id", blockIds),
conf);
// We may call both success and failure for the same block.
verify(listener, times(1)).onBlockFetchSuccess("b0", blocks.get("b0"));
verify(listener, times(1)).onBlockFetchFailure(eq("b1"), any());
verify(listener, times(1)).onBlockFetchSuccess("b2", blocks.get("b2"));
verify(listener, times(1)).onBlockFetchFailure(eq("b2"), any());
}
@Test
public void testEmptyBlockFetch() {
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> fetchBlocks(new LinkedHashMap<>(), new String[] {},
new OpenBlocks("app-id", "exec-id", new String[] {}), conf));
assertEquals("Zero-sized blockIds array", e.getMessage());
}
@Test
public void testFetchShuffleBlocksOrder() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffle_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new byte[1])));
blocks.put("shuffle_0_2_1", new NioManagedBuffer(ByteBuffer.wrap(new byte[2])));
blocks.put("shuffle_0_10_2", new NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[3])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new FetchShuffleBlocks("app-id", "exec-id", 0,
new long[]{0, 2, 10}, new int[][]{{0}, {1}, {2}}, false),
conf);
for (String blockId : blockIds) {
verify(listener).onBlockFetchSuccess(blockId, blocks.get(blockId));
}
}
@Test
public void testBatchFetchShuffleBlocksOrder() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffle_0_0_1_2", new NioManagedBuffer(ByteBuffer.wrap(new byte[1])));
blocks.put("shuffle_0_2_2_3", new NioManagedBuffer(ByteBuffer.wrap(new byte[2])));
blocks.put("shuffle_0_10_3_4", new NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[3])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(
blocks,
blockIds,
new FetchShuffleBlocks("app-id", "exec-id", 0,
new long[]{0, 2, 10}, new int[][]{{1, 2}, {2, 3}, {3, 4}}, true),
conf);
for (String blockId : blockIds) {
verify(listener).onBlockFetchSuccess(blockId, blocks.get(blockId));
}
}
@Test
public void testShuffleBlockChunksFetch() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffleChunk_0_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
blocks.put("shuffleChunk_0_0_0_1", new NioManagedBuffer(ByteBuffer.wrap(new byte[23])));
blocks.put("shuffleChunk_0_0_0_2",
new NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[23])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(blocks, blockIds,
new FetchShuffleBlockChunks("app-id", "exec-id", 0, 0, new int[] { 0 },
new int[][] {{ 0, 1, 2 }}), conf);
for (int i = 0; i < 3; i ++) {
verify(listener, times(1)).onBlockFetchSuccess("shuffleChunk_0_0_0_" + i,
blocks.get("shuffleChunk_0_0_0_" + i));
}
}
@Test
public void testShuffleBlockChunkFetchFailure() {
LinkedHashMap<String, ManagedBuffer> blocks = new LinkedHashMap<>();
blocks.put("shuffleChunk_0_0_0_0", new NioManagedBuffer(ByteBuffer.wrap(new byte[12])));
blocks.put("shuffleChunk_0_0_0_1", null);
blocks.put("shuffleChunk_0_0_0_2",
new NettyManagedBuffer(Unpooled.wrappedBuffer(new byte[23])));
String[] blockIds = blocks.keySet().toArray(new String[blocks.size()]);
BlockFetchingListener listener = fetchBlocks(blocks, blockIds,
new FetchShuffleBlockChunks("app-id", "exec-id", 0, 0, new int[]{0}, new int[][]{{0, 1, 2}}),
conf);
verify(listener, times(1)).onBlockFetchSuccess("shuffleChunk_0_0_0_0",
blocks.get("shuffleChunk_0_0_0_0"));
verify(listener, times(1)).onBlockFetchFailure(eq("shuffleChunk_0_0_0_1"), any());
verify(listener, times(1)).onBlockFetchSuccess("shuffleChunk_0_0_0_2",
blocks.get("shuffleChunk_0_0_0_2"));
}
@Test
public void testInvalidShuffleBlockIds() {
assertThrows(IllegalArgumentException.class, () -> fetchBlocks(new LinkedHashMap<>(),
new String[]{"shuffle_0_0"},
new FetchShuffleBlocks("app-id", "exec-id", 0, new long[] { 0 },
new int[][] {{ 0 }}, false), conf));
assertThrows(IllegalArgumentException.class, () -> fetchBlocks(new LinkedHashMap<>(),
new String[]{"shuffleChunk_0_0_0_0_0"},
new FetchShuffleBlockChunks("app-id", "exec-id", 0, 0, new int[] { 0 },
new int[][] {{ 0 }}), conf));
}
/**
* Begins a fetch on the given set of blocks by mocking out the server side of the RPC which
* simply returns the given (BlockId, Block) pairs.
* As "blocks" is a LinkedHashMap, the blocks are guaranteed to be returned in the same order
* that they were inserted in.
*
* If a block's buffer is "null", an exception will be thrown instead.
*/
private static BlockFetchingListener fetchBlocks(
LinkedHashMap<String, ManagedBuffer> blocks,
String[] blockIds,
BlockTransferMessage expectMessage,
TransportConf transportConf) {
TransportClient client = mock(TransportClient.class);
BlockFetchingListener listener = mock(BlockFetchingListener.class);
OneForOneBlockFetcher fetcher =
new OneForOneBlockFetcher(client, "app-id", "exec-id", blockIds, listener, transportConf);
// Respond to the "OpenBlocks" message with an appropriate ShuffleStreamHandle with streamId 123
doAnswer(invocationOnMock -> {
BlockTransferMessage message = BlockTransferMessage.Decoder.fromByteBuffer(
(ByteBuffer) invocationOnMock.getArguments()[0]);
RpcResponseCallback callback = (RpcResponseCallback) invocationOnMock.getArguments()[1];
callback.onSuccess(new StreamHandle(123, blocks.size()).toByteBuffer());
assertEquals(expectMessage, message);
return null;
}).when(client).sendRpc(any(ByteBuffer.class), any(RpcResponseCallback.class));
// Respond to each chunk request with a single buffer from our blocks array.
AtomicInteger expectedChunkIndex = new AtomicInteger(0);
Iterator<ManagedBuffer> blockIterator = blocks.values().iterator();
doAnswer(invocation -> {
try {
long streamId = (Long) invocation.getArguments()[0];
int myChunkIndex = (Integer) invocation.getArguments()[1];
assertEquals(123, streamId);
assertEquals(expectedChunkIndex.getAndIncrement(), myChunkIndex);
ChunkReceivedCallback callback = (ChunkReceivedCallback) invocation.getArguments()[2];
ManagedBuffer result = blockIterator.next();
if (result != null) {
callback.onSuccess(myChunkIndex, result);
} else {
callback.onFailure(myChunkIndex, new RuntimeException("Failed " + myChunkIndex));
}
} catch (Exception e) {
e.printStackTrace();
fail("Unexpected failure");
}
return null;
}).when(client).fetchChunk(anyLong(), anyInt(), any());
fetcher.start();
return listener;
}
}
| OneForOneBlockFetcherSuite |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/UnknownServerException.java | {
"start": 998,
"end": 1424
} | class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public UnknownServerException() {
}
public UnknownServerException(String message) {
super(message);
}
public UnknownServerException(Throwable cause) {
super(cause);
}
public UnknownServerException(String message, Throwable cause) {
super(message, cause);
}
}
| UnknownServerException |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/array/H2ArrayToStringFunction.java | {
"start": 1148,
"end": 5482
} | class ____ extends ArrayToStringFunction {
private final int maximumArraySize;
public H2ArrayToStringFunction(int maximumArraySize, TypeConfiguration typeConfiguration) {
super( typeConfiguration );
this.maximumArraySize = maximumArraySize;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
final Expression arrayExpression = (Expression) sqlAstArguments.get( 0 );
final Expression separatorExpression = (Expression) sqlAstArguments.get( 1 );
final Expression defaultExpression = sqlAstArguments.size() > 2 ? (Expression) sqlAstArguments.get( 2 ) : null;
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) arrayExpression.getExpressionType().getSingleJdbcMapping();
final int ddlTypeCode = pluralType.getElementType().getJdbcType().getDdlTypeCode();
final boolean needsCast = !SqlTypes.isStringType( ddlTypeCode );
if ( arrayExpression instanceof SelfRenderingOrderedSetAggregateFunctionSqlAstExpression<?> functionExpression
&& ArrayAggFunction.FUNCTION_NAME.equals( functionExpression.getFunctionName() ) ) {
// When the array argument is an aggregate expression, we access its contents directly
final Expression arrayElementExpression = (Expression) functionExpression.getArguments().get( 0 );
final List<SortSpecification> withinGroup = functionExpression.getWithinGroup();
final Predicate filter = functionExpression.getFilter();
sqlAppender.append( "listagg(" );
if ( defaultExpression != null ) {
sqlAppender.append( "coalesce(" );
}
if ( needsCast ) {
if ( ddlTypeCode == SqlTypes.BOOLEAN ) {
// By default, H2 uses upper case, so lower it for a consistent experience
sqlAppender.append( "lower(" );
}
sqlAppender.append( "cast(" );
}
arrayElementExpression.accept( walker );
if ( needsCast ) {
sqlAppender.append( " as varchar)" );
if ( ddlTypeCode == SqlTypes.BOOLEAN ) {
sqlAppender.append( ')' );
}
}
if ( defaultExpression != null ) {
sqlAppender.append( ',' );
defaultExpression.accept( walker );
sqlAppender.append( ')' );
}
sqlAppender.append( "," );
walker.render( separatorExpression, SqlAstNodeRenderingMode.DEFAULT );
sqlAppender.appendSql( ')' );
if ( withinGroup != null && !withinGroup.isEmpty() ) {
walker.getCurrentClauseStack().push( Clause.WITHIN_GROUP );
sqlAppender.appendSql( " within group (order by " );
withinGroup.get( 0 ).accept( walker );
for ( int i = 1; i < withinGroup.size(); i++ ) {
sqlAppender.appendSql( ',' );
withinGroup.get( i ).accept( walker );
}
sqlAppender.appendSql( ')' );
walker.getCurrentClauseStack().pop();
}
if ( filter != null ) {
walker.getCurrentClauseStack().push( Clause.WHERE );
sqlAppender.appendSql( " filter (where " );
filter.accept( walker );
sqlAppender.appendSql( ')' );
walker.getCurrentClauseStack().pop();
}
}
else {
sqlAppender.append( "case when " );
arrayExpression.accept( walker );
sqlAppender.append( " is not null then coalesce((select listagg(" );
if ( defaultExpression != null ) {
sqlAppender.append( "coalesce(" );
}
if ( needsCast ) {
if ( ddlTypeCode == SqlTypes.BOOLEAN ) {
// By default, H2 uses upper case, so lower it for a consistent experience
sqlAppender.append( "lower(" );
}
sqlAppender.append( "cast(" );
}
sqlAppender.append( "array_get(" );
arrayExpression.accept( walker );
sqlAppender.append( ",i.idx)" );
if ( needsCast ) {
sqlAppender.append( " as varchar)" );
if ( ddlTypeCode == SqlTypes.BOOLEAN ) {
sqlAppender.append( ')' );
}
}
if ( defaultExpression != null ) {
sqlAppender.append( ',' );
defaultExpression.accept( walker );
sqlAppender.append( ')' );
}
sqlAppender.append( "," );
walker.render( separatorExpression, SqlAstNodeRenderingMode.DEFAULT );
sqlAppender.append( ") within group (order by i.idx) from system_range(1," );
sqlAppender.append( Integer.toString( maximumArraySize ) );
sqlAppender.append( ") i(idx) where i.idx<=coalesce(cardinality(" );
arrayExpression.accept( walker );
sqlAppender.append( "),0)),'') end" );
}
}
}
| H2ArrayToStringFunction |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsConfig.java | {
"start": 1979,
"end": 16973
} | class ____ implements ToXContentObject, Writeable {
public static final String BLANK_ID = "blank_data_frame_id";
public static final String BLANK_DEST_INDEX = "blank_dest_index";
public static final String TYPE = "data_frame_analytics_config";
public static final ByteSizeValue DEFAULT_MODEL_MEMORY_LIMIT = ByteSizeValue.ofGb(1);
public static final ByteSizeValue MIN_MODEL_MEMORY_LIMIT = ByteSizeValue.ofKb(1);
/**
* This includes the overhead of thread stacks and data structures that the program might use that
* are not instrumented. But it does NOT include the memory used by loading the executable code.
*/
public static final ByteSizeValue PROCESS_MEMORY_OVERHEAD = ByteSizeValue.ofMb(5);
public static final ParseField ID = new ParseField("id");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField SOURCE = new ParseField("source");
public static final ParseField DEST = new ParseField("dest");
public static final ParseField ANALYSIS = new ParseField("analysis");
public static final ParseField CONFIG_TYPE = new ParseField("config_type");
public static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields");
public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit");
public static final ParseField HEADERS = new ParseField("headers");
public static final ParseField CREATE_TIME = new ParseField("create_time");
public static final ParseField VERSION = new ParseField("version");
public static final ParseField ALLOW_LAZY_START = new ParseField("allow_lazy_start");
public static final ParseField MAX_NUM_THREADS = new ParseField("max_num_threads");
public static final ParseField META = new ParseField("_meta");
public static final ObjectParser<Builder, Void> STRICT_PARSER = createParser(false);
public static final ObjectParser<Builder, Void> LENIENT_PARSER = createParser(true);
private static ObjectParser<Builder, Void> createParser(boolean ignoreUnknownFields) {
ObjectParser<Builder, Void> parser = new ObjectParser<>(TYPE, ignoreUnknownFields, Builder::new);
parser.declareString((c, s) -> {}, CONFIG_TYPE);
parser.declareString(Builder::setId, ID);
parser.declareString(Builder::setDescription, DESCRIPTION);
parser.declareObject(Builder::setSource, DataFrameAnalyticsSource.createParser(ignoreUnknownFields), SOURCE);
parser.declareObject(Builder::setDest, DataFrameAnalyticsDest.createParser(ignoreUnknownFields), DEST);
parser.declareObject(Builder::setAnalysis, (p, c) -> parseAnalysis(p, ignoreUnknownFields), ANALYSIS);
parser.declareField(
Builder::setAnalyzedFields,
(p, c) -> FetchSourceContext.fromXContent(p),
ANALYZED_FIELDS,
OBJECT_ARRAY_BOOLEAN_OR_STRING
);
parser.declareField(
Builder::setModelMemoryLimit,
(p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()),
MODEL_MEMORY_LIMIT,
VALUE
);
parser.declareBoolean(Builder::setAllowLazyStart, ALLOW_LAZY_START);
parser.declareInt(Builder::setMaxNumThreads, MAX_NUM_THREADS);
parser.declareObject(Builder::setMeta, (p, c) -> p.mapOrdered(), META);
if (ignoreUnknownFields) {
// Headers are not parsed by the strict (config) parser, so headers supplied in the _body_ of a REST request will be rejected.
// (For config, headers are explicitly transferred from the auth headers by code in the put data frame actions.)
parser.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS);
// Creation time is set automatically during PUT, so create_time supplied in the _body_ of a REST request will be rejected.
parser.declareField(
Builder::setCreateTime,
p -> TimeUtils.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()),
CREATE_TIME,
ObjectParser.ValueType.VALUE
);
// Version is set automatically during PUT, so version supplied in the _body_ of a REST request will be rejected.
parser.declareString(Builder::setVersion, MlConfigVersion::fromString, VERSION);
}
return parser;
}
private static DataFrameAnalysis parseAnalysis(XContentParser parser, boolean ignoreUnknownFields) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser);
DataFrameAnalysis analysis = parser.namedObject(DataFrameAnalysis.class, parser.currentName(), ignoreUnknownFields);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
return analysis;
}
private final String id;
private final String description;
private final DataFrameAnalyticsSource source;
private final DataFrameAnalyticsDest dest;
private final DataFrameAnalysis analysis;
private final FetchSourceContext analyzedFields;
/**
* This may be null up to the point of persistence, as the relationship with <code>xpack.ml.max_model_memory_limit</code>
* depends on whether the user explicitly set the value or if the default was requested. <code>null</code> indicates
* the default was requested, which in turn means a default higher than the maximum is silently capped.
* A non-<code>null</code> value higher than <code>xpack.ml.max_model_memory_limit</code> will cause a
* validation error even if it is equal to the default value. This behaviour matches what is done in
* {@link org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits}.
*/
private final ByteSizeValue modelMemoryLimit;
private final Map<String, String> headers;
private final Instant createTime;
private final MlConfigVersion version;
private final boolean allowLazyStart;
private final int maxNumThreads;
private final Map<String, Object> meta;
private DataFrameAnalyticsConfig(
String id,
String description,
DataFrameAnalyticsSource source,
DataFrameAnalyticsDest dest,
DataFrameAnalysis analysis,
Map<String, String> headers,
ByteSizeValue modelMemoryLimit,
FetchSourceContext analyzedFields,
Instant createTime,
MlConfigVersion version,
boolean allowLazyStart,
Integer maxNumThreads,
Map<String, Object> meta
) {
this.id = ExceptionsHelper.requireNonNull(id, ID);
this.description = description;
this.source = ExceptionsHelper.requireNonNull(source, SOURCE);
this.dest = ExceptionsHelper.requireNonNull(dest, DEST);
this.analysis = ExceptionsHelper.requireNonNull(analysis, ANALYSIS);
this.analyzedFields = analyzedFields;
this.modelMemoryLimit = modelMemoryLimit;
this.headers = Collections.unmodifiableMap(headers);
this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
this.version = version;
this.allowLazyStart = allowLazyStart;
if (maxNumThreads != null && maxNumThreads < 1) {
throw ExceptionsHelper.badRequestException("[{}] must be a positive integer", MAX_NUM_THREADS.getPreferredName());
}
this.maxNumThreads = maxNumThreads == null ? 1 : maxNumThreads;
this.meta = meta == null ? null : Collections.unmodifiableMap(meta);
}
public DataFrameAnalyticsConfig(StreamInput in) throws IOException {
this.id = in.readString();
this.description = in.readOptionalString();
this.source = new DataFrameAnalyticsSource(in);
this.dest = new DataFrameAnalyticsDest(in);
this.analysis = in.readNamedWriteable(DataFrameAnalysis.class);
this.analyzedFields = in.readOptionalWriteable(FetchSourceContext::readFrom);
this.modelMemoryLimit = in.readOptionalWriteable(ByteSizeValue::readFrom);
this.headers = in.readImmutableMap(StreamInput::readString);
this.createTime = in.readOptionalInstant();
this.version = in.readBoolean() ? MlConfigVersion.readVersion(in) : null;
this.allowLazyStart = in.readBoolean();
this.maxNumThreads = in.readVInt();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
Map<String, Object> readMeta = in.readGenericMap();
this.meta = readMeta == null ? null : Collections.unmodifiableMap(readMeta);
} else {
this.meta = null;
}
}
public String getId() {
return id;
}
public String getDescription() {
return description;
}
public DataFrameAnalyticsSource getSource() {
return source;
}
public DataFrameAnalyticsDest getDest() {
return dest;
}
public DataFrameAnalysis getAnalysis() {
return analysis;
}
public FetchSourceContext getAnalyzedFields() {
return analyzedFields;
}
public ByteSizeValue getModelMemoryLimit() {
return modelMemoryLimit != null ? modelMemoryLimit : DEFAULT_MODEL_MEMORY_LIMIT;
}
public Map<String, String> getHeaders() {
return headers;
}
public Instant getCreateTime() {
return createTime;
}
public MlConfigVersion getVersion() {
return version;
}
public boolean isAllowLazyStart() {
return allowLazyStart;
}
public Integer getMaxNumThreads() {
return maxNumThreads;
}
public Map<String, Object> getMeta() {
return meta;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
final boolean forInternalStorage = params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false);
builder.startObject();
builder.field(ID.getPreferredName(), id);
if (params.paramAsBoolean(EXCLUDE_GENERATED, false) == false) {
if (createTime != null) {
builder.timestampFieldsFromUnixEpochMillis(
CREATE_TIME.getPreferredName(),
CREATE_TIME.getPreferredName() + "_string",
createTime.toEpochMilli()
);
}
if (version != null) {
builder.field(VERSION.getPreferredName(), version);
}
if (headers.isEmpty() == false) {
if (forInternalStorage) {
assertNoAuthorizationHeader(headers);
builder.field(HEADERS.getPreferredName(), headers);
} else {
XContentUtils.addAuthorizationInfo(builder, headers);
}
}
if (forInternalStorage) {
builder.field(CONFIG_TYPE.getPreferredName(), TYPE);
}
}
if (description != null) {
builder.field(DESCRIPTION.getPreferredName(), description);
}
builder.field(SOURCE.getPreferredName(), source);
builder.field(DEST.getPreferredName(), dest);
builder.startObject(ANALYSIS.getPreferredName());
builder.field(
analysis.getWriteableName(),
analysis,
new MapParams(Collections.singletonMap(VERSION.getPreferredName(), version == null ? null : version.toString()))
);
builder.endObject();
if (analyzedFields != null) {
builder.field(ANALYZED_FIELDS.getPreferredName(), analyzedFields);
}
builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), getModelMemoryLimit().getStringRep());
builder.field(ALLOW_LAZY_START.getPreferredName(), allowLazyStart);
builder.field(MAX_NUM_THREADS.getPreferredName(), maxNumThreads);
if (meta != null) {
builder.field(META.getPreferredName(), meta);
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(id);
out.writeOptionalString(description);
source.writeTo(out);
dest.writeTo(out);
out.writeNamedWriteable(analysis);
out.writeOptionalWriteable(analyzedFields);
out.writeOptionalWriteable(modelMemoryLimit);
out.writeMap(headers, StreamOutput::writeString);
out.writeOptionalInstant(createTime);
if (version != null) {
out.writeBoolean(true);
MlConfigVersion.writeVersion(version, out);
} else {
out.writeBoolean(false);
}
out.writeBoolean(allowLazyStart);
out.writeVInt(maxNumThreads);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
out.writeGenericMap(meta);
}
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null || getClass() != o.getClass()) return false;
DataFrameAnalyticsConfig other = (DataFrameAnalyticsConfig) o;
return Objects.equals(id, other.id)
&& Objects.equals(description, other.description)
&& Objects.equals(source, other.source)
&& Objects.equals(dest, other.dest)
&& Objects.equals(analysis, other.analysis)
&& Objects.equals(headers, other.headers)
&& Objects.equals(getModelMemoryLimit(), other.getModelMemoryLimit())
&& Objects.equals(analyzedFields, other.analyzedFields)
&& Objects.equals(createTime, other.createTime)
&& Objects.equals(version, other.version)
&& Objects.equals(allowLazyStart, other.allowLazyStart)
&& maxNumThreads == other.maxNumThreads
&& Objects.equals(meta, other.meta);
}
@Override
public int hashCode() {
return Objects.hash(
id,
description,
source,
dest,
analysis,
headers,
getModelMemoryLimit(),
analyzedFields,
createTime,
version,
allowLazyStart,
maxNumThreads,
meta
);
}
@Override
public String toString() {
return Strings.toString(this);
}
public static String documentId(String id) {
return TYPE + "-" + id;
}
/**
* Returns the job id from the doc id. Returns {@code null} if the doc id is invalid.
*/
@Nullable
public static String extractJobIdFromDocId(String docId) {
String jobId = docId.replaceAll("^" + TYPE + "-", "");
return jobId.equals(docId) ? null : jobId;
}
public static | DataFrameAnalyticsConfig |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/filter/mysql8datetime/MySQL8DateTimeResultSetMetaData.java | {
"start": 356,
"end": 4592
} | class ____ implements ResultSetMetaData {
private ResultSetMetaData resultSetMetaData;
public MySQL8DateTimeResultSetMetaData(ResultSetMetaData resultSetMetaData) {
super();
this.resultSetMetaData = resultSetMetaData;
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return resultSetMetaData.unwrap(iface);
}
@Override
public int getColumnCount() throws SQLException {
return resultSetMetaData.getColumnCount();
}
@Override
public boolean isAutoIncrement(int column) throws SQLException {
return resultSetMetaData.isAutoIncrement(column);
}
@Override
public boolean isCaseSensitive(int column) throws SQLException {
return resultSetMetaData.isCaseSensitive(column);
}
@Override
public boolean isSearchable(int column) throws SQLException {
return resultSetMetaData.isSearchable(column);
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return resultSetMetaData.isWrapperFor(iface);
}
@Override
public boolean isCurrency(int column) throws SQLException {
return resultSetMetaData.isCurrency(column);
}
@Override
public int isNullable(int column) throws SQLException {
return resultSetMetaData.isNullable(column);
}
@Override
public boolean isSigned(int column) throws SQLException {
return resultSetMetaData.isSigned(column);
}
@Override
public int getColumnDisplaySize(int column) throws SQLException {
return resultSetMetaData.getColumnDisplaySize(column);
}
@Override
public String getColumnLabel(int column) throws SQLException {
return resultSetMetaData.getColumnLabel(column);
}
@Override
public String getColumnName(int column) throws SQLException {
return resultSetMetaData.getColumnName(column);
}
@Override
public String getSchemaName(int column) throws SQLException {
return resultSetMetaData.getSchemaName(column);
}
@Override
public int getPrecision(int column) throws SQLException {
return resultSetMetaData.getPrecision(column);
}
@Override
public int getScale(int column) throws SQLException {
return resultSetMetaData.getScale(column);
}
@Override
public String getTableName(int column) throws SQLException {
return resultSetMetaData.getTableName(column);
}
@Override
public String getCatalogName(int column) throws SQLException {
return resultSetMetaData.getCatalogName(column);
}
@Override
public int getColumnType(int column) throws SQLException {
return resultSetMetaData.getColumnType(column);
}
@Override
public String getColumnTypeName(int column) throws SQLException {
return resultSetMetaData.getColumnTypeName(column);
}
@Override
public boolean isReadOnly(int column) throws SQLException {
return resultSetMetaData.isReadOnly(column);
}
@Override
public boolean isWritable(int column) throws SQLException {
return resultSetMetaData.isWritable(column);
}
@Override
public boolean isDefinitelyWritable(int column) throws SQLException {
return resultSetMetaData.isDefinitelyWritable(column);
}
/**
* 针对8.0.24版本开始,如果把mysql DATETIME映射回Timestamp,就需要把javaClass的类型也改回去
* 相关类在com.mysql.cj.MysqlType 中
* 旧版本jdbc为
* DATETIME("DATETIME", Types.TIMESTAMP, Timestamp.class, 0, MysqlType.IS_NOT_DECIMAL, 26L, "[(fsp)]"),
* 8.0.24及以上版本jdbc实现改为
* DATETIME("DATETIME", Types.TIMESTAMP, LocalDateTime.class, 0, MysqlType.IS_NOT_DECIMAL, 26L, "[(fsp)]"),
* @param column 列的索引位
* @return 列名称
* @see java.sql.ResultSetMetaData#getColumnClassName(int)
* @throws SQLException 如果发生数据库访问错误
*/
@Override
public String getColumnClassName(int column) throws SQLException {
String className = resultSetMetaData.getColumnClassName(column);
if (LocalDateTime.class.getName().equals(className)) {
return Timestamp.class.getName();
}
return className;
}
}
| MySQL8DateTimeResultSetMetaData |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ClassUtils.java | {
"start": 18788,
"end": 19024
} | class ____ a primitive wrapper class
*/
public static boolean isPrimitiveWrapper(Class<?> clazz) {
Assert.notNull(clazz, "Class must not be null");
return primitiveWrapperTypeMap.containsKey(clazz);
}
/**
* Check if the given | is |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/io/CharTypes.java | {
"start": 71,
"end": 11522
} | class ____
{
protected final static char[] HC = "0123456789ABCDEF".toCharArray();
protected final static char[] HClower = "0123456789abcdef".toCharArray();
protected final static byte[] HB;
protected final static byte[] HBlower;
static {
int len = HC.length;
HB = new byte[len];
HBlower = new byte[len];
for (int i = 0; i < len; ++i) {
HB[i] = (byte) HC[i];
HBlower[i] = (byte) HClower[i];
}
}
/**
* Lookup table used for determining which input characters
* need special handling when contained in text segment.
*/
protected final static int[] sInputCodes;
static {
// 96 would do for most cases (backslash is ASCII 94)
// but if we want to do lookups by raw bytes it's better
// to have full table
final int[] table = new int[256];
// Control chars and non-space white space are not allowed unquoted
for (int i = 0; i < 32; ++i) {
table[i] = -1;
}
// And then string end and quote markers are special too
table['"'] = 1;
table['\\'] = 1;
sInputCodes = table;
}
/**
* Additionally we can combine UTF-8 decoding info into similar
* data table.
*/
protected final static int[] sInputCodesUTF8;
static {
final int[] table = Arrays.copyOf(sInputCodes, sInputCodes.length);
for (int c = 128; c < 256; ++c) {
int code;
// We'll add number of bytes needed for decoding
if ((c & 0xE0) == 0xC0) { // 2 bytes (0x0080 - 0x07FF)
code = 2;
} else if ((c & 0xF0) == 0xE0) { // 3 bytes (0x0800 - 0xFFFF)
code = 3;
} else if ((c & 0xF8) == 0xF0) {
// 4 bytes; double-char with surrogates and all...
code = 4;
} else {
// And -1 seems like a good "universal" error marker...
code = -1;
}
table[c] = code;
}
sInputCodesUTF8 = table;
}
/**
* To support non-default (and non-standard) unquoted Object Property names mode,
* need to have alternate checking.
* Basically this is list of 8-bit ASCII characters that are legal
* as part of Javascript identifier
*/
protected final static int[] sInputCodesJsNames;
static {
final int[] table = new int[256];
// Default is "not a name char", mark ones that are
Arrays.fill(table, -1);
// Assume rules with JS same as Java (change if/as needed)
for (int i = 33; i < 256; ++i) {
if (Character.isJavaIdentifierPart((char) i)) {
table[i] = 0;
}
}
// Also: '@', '#' and '*' are also to be accepted as well.
// And '-' (for hyphenated names); and '+' for sake of symmetricity...
table['@'] = 0;
table['#'] = 0;
table['*'] = 0;
table['-'] = 0;
table['+'] = 0;
sInputCodesJsNames = table;
}
/**
* This table is similar to Latin-1, except that it marks all "high-bit"
* code as ok. They will be validated at a later point, when decoding
* name
*/
protected final static int[] sInputCodesUtf8JsNames;
static {
// start with 8-bit JS names
final int[] table = Arrays.copyOf(sInputCodesJsNames, sInputCodesJsNames.length);
Arrays.fill(table, 128, 128, 0);
sInputCodesUtf8JsNames = table;
}
/**
* Decoding table used to quickly determine characters that are
* relevant within comment content.
*/
protected final static int[] sInputCodesComment;
static {
final int[] buf = new int[256];
// but first: let's start with UTF-8 multi-byte markers:
System.arraycopy(sInputCodesUTF8, 128, buf, 128, 128);
// default (0) means "ok" (skip); -1 invalid, others marked by char itself
Arrays.fill(buf, 0, 32, -1); // invalid white space
buf['\t'] = 0; // tab is still fine
buf['\n'] = '\n'; // lf/cr need to be observed, ends cpp comment
buf['\r'] = '\r';
buf['*'] = '*'; // end marker for c-style comments
sInputCodesComment = buf;
}
/**
* Decoding table used for skipping white space and comments.
*
* @since 2.3
*/
protected final static int[] sInputCodesWS;
static {
// but first: let's start with UTF-8 multi-byte markers:
final int[] buf = new int[256];
System.arraycopy(sInputCodesUTF8, 128, buf, 128, 128);
// default (0) means "not whitespace" (end); 1 "whitespace", -1 invalid,
// 2-4 UTF-8 multi-bytes, others marked by char itself
//
Arrays.fill(buf, 0, 32, -1); // invalid white space
buf[' '] = 1;
buf['\t'] = 1;
buf['\n'] = '\n'; // lf/cr need to be observed, ends cpp comment
buf['\r'] = '\r';
buf['/'] = '/'; // start marker for c/cpp comments
buf['#'] = '#'; // start marker for YAML comments
sInputCodesWS = buf;
}
/**
* Lookup table used for determining which output characters in
* 7-bit ASCII range need to be quoted.
*/
protected final static int[] sOutputEscapes128NoSlash;
static {
int[] table = new int[128];
// Control chars need generic escape sequence
for (int i = 0; i < 32; ++i) {
// 04-Mar-2011, tatu: Used to use "-(i + 1)", replaced with constant
table[i] = CharacterEscapes.ESCAPE_STANDARD;
}
// Others (and some within that range too) have explicit shorter sequences
table['"'] = '"';
table['\\'] = '\\';
// Escaping of slash is optional, so let's not add it
table[0x08] = 'b';
table[0x09] = 't';
table[0x0C] = 'f';
table[0x0A] = 'n';
table[0x0D] = 'r';
sOutputEscapes128NoSlash = table;
}
/**
* Lookup table same as {@link #sOutputEscapes128NoSlash} except that
* forward slash ('/') is also escaped
*/
protected final static int[] sOutputEscapes128WithSlash;
static {
sOutputEscapes128WithSlash = Arrays.copyOf(sOutputEscapes128NoSlash, sOutputEscapes128NoSlash.length);
sOutputEscapes128WithSlash['/'] = '/';
}
/**
* Lookup table for the first 256 Unicode characters (ASCII / UTF-8)
* range. For actual hex digits, contains corresponding value;
* for others -1.
*<p>
* NOTE: before 2.10.1, was of size 128, extended for simpler handling
*/
protected final static int[] sHexValues = new int[256];
static {
Arrays.fill(sHexValues, -1);
for (int i = 0; i < 10; ++i) {
sHexValues['0' + i] = i;
}
for (int i = 0; i < 6; ++i) {
sHexValues['a' + i] = 10 + i;
sHexValues['A' + i] = 10 + i;
}
}
public static int[] getInputCodeLatin1() { return sInputCodes; }
public static int[] getInputCodeUtf8() { return sInputCodesUTF8; }
public static int[] getInputCodeLatin1JsNames() { return sInputCodesJsNames; }
public static int[] getInputCodeUtf8JsNames() { return sInputCodesUtf8JsNames; }
public static int[] getInputCodeComment() { return sInputCodesComment; }
public static int[] getInputCodeWS() { return sInputCodesWS; }
/**
* Accessor for getting a read-only encoding table for first 128 Unicode
* code points (single-byte UTF-8 characters).
* Value of 0 means "no escaping"; other positive values that value is character
* to use after backslash; and negative values that generic (backslash - u)
* escaping is to be used.
*
* @return 128-entry {@code int[]} that contains escape definitions
*/
public static int[] get7BitOutputEscapes() {
// 11-Aug-2025, tatu: Note! 3.x still defaults to NOT escaping forward slash
// by defeault
return get7BitOutputEscapes('"', false);
}
/**
* Alternative to {@link #get7BitOutputEscapes()} when either a non-standard
* quote character is used, or forward slash is to be escaped.
*
* @param quoteChar Character used for quoting textual values and property names;
* usually double-quote but sometimes changed to single-quote (apostrophe)
* @param escapeSlash Whether forward slash ({@code "/"}) is escaped by default
* or not.
*
* @return 128-entry {@code int[]} that contains escape definitions
*
* @since 2.17
*/
public static int[] get7BitOutputEscapes(int quoteChar, boolean escapeSlash) {
if (quoteChar == '"') {
if (escapeSlash) {
return sOutputEscapes128WithSlash;
}
return sOutputEscapes128NoSlash;
}
return AltQuoteEscapes.instance.altEscapesFor(quoteChar, escapeSlash);
}
public static int charToHex(int ch)
{
// 08-Nov-2019, tatu: As per [core#540] and [core#578], changed to
// force masking here so caller need not do that.
return sHexValues[ch & 0xFF];
}
// @since 2.13
public static char hexToChar(int ch)
{
return HC[ch];
}
/**
* Helper method for appending JSON-escaped version of contents
* into specific {@link StringBuilder}, using default JSON specification
* mandated minimum escaping rules.
*
* @param sb Buffer to append escaped contents in
*
* @param content Unescaped String value to append with escaping applied
*/
public static void appendQuoted(StringBuilder sb, String content) {
final int[] escCodes = sOutputEscapes128WithSlash;
final int escLen = escCodes.length;
for (int i = 0, len = content.length(); i < len; ++i) {
char c = content.charAt(i);
if (c >= escLen || escCodes[c] == 0) {
sb.append(c);
continue;
}
sb.append('\\');
int escCode = escCodes[c];
if (escCode < 0) { // generic quoting (hex value)
// The only negative value sOutputEscapes128 returns
// is CharacterEscapes.ESCAPE_STANDARD, which mean
// appendQuotes should encode using the Unicode encoding;
// not sure if this is the right way to encode for
// CharacterEscapes.ESCAPE_CUSTOM or other (future)
// CharacterEscapes.ESCAPE_XXX values.
// We know that it has to fit in just 2 hex chars
sb.append('u');
sb.append('0');
sb.append('0');
int value = c; // widening
sb.append(HC[value >> 4]);
sb.append(HC[value & 0xF]);
} else { // "named", i.e. prepend with slash
sb.append((char) escCode);
}
}
}
public static char[] copyHexChars(boolean uppercase) {
return (uppercase) ? HC.clone() : HClower.clone();
}
public static byte[] copyHexBytes(boolean uppercase) {
return (uppercase) ? HB.clone() : HBlower.clone();
}
/**
* Helper used for lazy initialization of alternative escape (quoting)
* table, used for escaping content that uses non-standard quote
* character (usually apostrophe).
*/
private static | CharTypes |
java | redisson__redisson | redisson/src/main/java/org/redisson/connection/pool/PubSubConnectionPool.java | {
"start": 1148,
"end": 1906
} | class ____ extends ConnectionPool<RedisPubSubConnection> {
public PubSubConnectionPool(MasterSlaveServersConfig config, ConnectionManager connectionManager, MasterSlaveEntry masterSlaveEntry) {
super(config, connectionManager, masterSlaveEntry);
}
public CompletableFuture<RedisPubSubConnection> get() {
return get(RedisCommands.SUBSCRIBE, false);
}
public CompletableFuture<RedisPubSubConnection> get(ClientConnectionsEntry entry) {
return get(RedisCommands.SUBSCRIBE, entry, false);
}
@Override
protected ConnectionsHolder<RedisPubSubConnection> getConnectionHolder(ClientConnectionsEntry entry, boolean trackChanges) {
return entry.getPubSubConnectionsHolder();
}
}
| PubSubConnectionPool |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/SingleConnectionDataSource.java | {
"start": 7976,
"end": 11190
} | class ____ {@link AutoCloseable}, it can be used
* with a try-with-resource statement.
* @since 6.1.2
*/
@Override
public void close() {
destroy();
}
/**
* Close the underlying Connection.
* The provider of this DataSource needs to care for proper shutdown.
* <p>As this bean implements {@link DisposableBean}, a bean factory
* will automatically invoke this on destruction of the bean.
*/
@Override
public void destroy() {
this.connectionLock.lock();
try {
if (this.target != null) {
closeConnection(this.target);
}
}
finally {
this.connectionLock.unlock();
}
}
/**
* Initialize the underlying Connection via the DriverManager.
*/
public void initConnection() throws SQLException {
if (getUrl() == null) {
throw new IllegalStateException("'url' property is required for lazily initializing a Connection");
}
this.connectionLock.lock();
try {
if (this.target != null) {
closeConnection(this.target);
}
this.target = getConnectionFromDriver(getUsername(), getPassword());
prepareConnection(this.target);
if (logger.isDebugEnabled()) {
logger.debug("Established shared JDBC Connection: " + this.target);
}
this.connection = (isSuppressClose() ? getCloseSuppressingConnectionProxy(this.target) : this.target);
}
finally {
this.connectionLock.unlock();
}
}
/**
* Reset the underlying shared Connection, to be reinitialized on next access.
*/
public void resetConnection() {
this.connectionLock.lock();
try {
if (this.target != null) {
closeConnection(this.target);
}
this.target = null;
this.connection = null;
}
finally {
this.connectionLock.unlock();
}
}
/**
* Prepare the given Connection before it is exposed.
* <p>The default implementation applies the auto-commit flag, if necessary.
* Can be overridden in subclasses.
* @param con the Connection to prepare
* @see #setAutoCommit
*/
protected void prepareConnection(Connection con) throws SQLException {
Boolean autoCommit = getAutoCommitValue();
if (autoCommit != null && con.getAutoCommit() != autoCommit) {
con.setAutoCommit(autoCommit);
}
}
/**
* Close the underlying shared Connection.
* @since 6.1.2
*/
protected void closeConnection(Connection con) {
if (isRollbackBeforeClose()) {
try {
if (!con.getAutoCommit()) {
con.rollback();
}
}
catch (Throwable ex) {
logger.info("Could not roll back shared JDBC Connection before close", ex);
}
}
try {
con.close();
}
catch (Throwable ex) {
logger.info("Could not close shared JDBC Connection", ex);
}
}
/**
* Wrap the given Connection with a proxy that delegates every method call to it
* but suppresses close calls.
* @param target the original Connection to wrap
* @return the wrapped Connection
*/
protected Connection getCloseSuppressingConnectionProxy(Connection target) {
return (Connection) Proxy.newProxyInstance(
ConnectionProxy.class.getClassLoader(),
new Class<?>[] {ConnectionProxy.class},
new CloseSuppressingInvocationHandler(target));
}
/**
* Invocation handler that suppresses close calls on JDBC Connections.
*/
private static | implements |
java | apache__camel | components/camel-pulsar/src/main/java/org/apache/camel/component/pulsar/PulsarMessageListener.java | {
"start": 1244,
"end": 3393
} | class ____ implements MessageListener<byte[]> {
private final PulsarEndpoint endpoint;
private final PulsarConsumer pulsarConsumer;
public PulsarMessageListener(PulsarEndpoint endpoint, PulsarConsumer pulsarConsumer) {
this.endpoint = endpoint;
this.pulsarConsumer = pulsarConsumer;
}
@Override
public void received(final Consumer<byte[]> consumer, final Message<byte[]> message) {
final Exchange exchange = PulsarMessageUtils.updateExchange(message, pulsarConsumer.createExchange(false));
if (endpoint.getPulsarConfiguration().isAllowManualAcknowledgement()) {
exchange.getIn().setHeader(PulsarMessageHeaders.MESSAGE_RECEIPT,
endpoint.getComponent().getPulsarMessageReceiptFactory()
.newInstance(exchange, message, consumer));
}
processAsync(exchange, consumer, message);
}
private void processAsync(final Exchange exchange, final Consumer<byte[]> consumer, final Message<byte[]> message) {
pulsarConsumer.getAsyncProcessor().process(exchange, doneSync -> {
try {
if (exchange.getException() != null) {
pulsarConsumer.getExceptionHandler().handleException("Error processing exchange", exchange,
exchange.getException());
} else {
try {
acknowledge(consumer, message);
} catch (Exception e) {
pulsarConsumer.getExceptionHandler().handleException("Error processing exchange", exchange,
exchange.getException());
}
}
} finally {
pulsarConsumer.releaseExchange(exchange, false);
}
});
}
private void acknowledge(final Consumer<byte[]> consumer, final Message<byte[]> message)
throws PulsarClientException {
if (!endpoint.getPulsarConfiguration().isAllowManualAcknowledgement()) {
consumer.acknowledge(message.getMessageId());
}
}
}
| PulsarMessageListener |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleDriveEndpointBuilderFactory.java | {
"start": 49490,
"end": 52244
} | interface ____ {
/**
* Google Drive (camel-google-drive)
* Manage files in Google Drive.
*
* Category: file,cloud,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-google-drive
*
* Syntax: <code>google-drive:apiName/methodName</code>
*
* Path parameter: apiName (required)
* What kind of operation to perform
* There are 10 enums and the value can be one of: DRIVE_ABOUT,
* DRIVE_CHANGES, DRIVE_CHANNELS, DRIVE_COMMENTS, DRIVE_DRIVES,
* DRIVE_FILES, DRIVE_PERMISSIONS, DRIVE_REPLIES, DRIVE_REVISIONS,
* DRIVE_TEAMDRIVES
*
* Path parameter: methodName (required)
* What sub operation to use for the selected operation
* There are 13 enums and the value can be one of: copy, delete, get,
* getIdForEmail, insert, list, patch, stop, touch, trash, untrash,
* update, watch
*
* @param path apiName/methodName
* @return the dsl builder
*/
default GoogleDriveEndpointBuilder googleDrive(String path) {
return GoogleDriveEndpointBuilderFactory.endpointBuilder("google-drive", path);
}
/**
* Google Drive (camel-google-drive)
* Manage files in Google Drive.
*
* Category: file,cloud,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-google-drive
*
* Syntax: <code>google-drive:apiName/methodName</code>
*
* Path parameter: apiName (required)
* What kind of operation to perform
* There are 10 enums and the value can be one of: DRIVE_ABOUT,
* DRIVE_CHANGES, DRIVE_CHANNELS, DRIVE_COMMENTS, DRIVE_DRIVES,
* DRIVE_FILES, DRIVE_PERMISSIONS, DRIVE_REPLIES, DRIVE_REVISIONS,
* DRIVE_TEAMDRIVES
*
* Path parameter: methodName (required)
* What sub operation to use for the selected operation
* There are 13 enums and the value can be one of: copy, delete, get,
* getIdForEmail, insert, list, patch, stop, touch, trash, untrash,
* update, watch
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path apiName/methodName
* @return the dsl builder
*/
default GoogleDriveEndpointBuilder googleDrive(String componentName, String path) {
return GoogleDriveEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static GoogleDriveEndpointBuilder endpointBuilder(String componentName, String path) {
| GoogleDriveBuilders |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java | {
"start": 1274,
"end": 4348
} | interface ____ extends NamedXContentObject, VersionedNamedWriteable {
String DEFAULT_TOP_CLASSES_RESULTS_FIELD = "top_classes";
String DEFAULT_RESULTS_FIELD = "predicted_value";
ParseField RESULTS_FIELD = new ParseField("results_field");
boolean isTargetTypeSupported(TargetType targetType);
/**
* Return a copy of this with the settings updated by the
* values in {@code update}.
* @param update The update to apply
* @return A new updated config
*/
InferenceConfig apply(InferenceConfigUpdate update);
@Override
default TransportVersion getMinimalSupportedVersion() {
return getMinimalSupportedTransportVersion();
}
/**
* All nodes in the cluster must have at least this MlConfigVersion attribute
*/
MlConfigVersion getMinimalSupportedMlConfigVersion();
/**
* All communication in the cluster must use at least this version
*/
TransportVersion getMinimalSupportedTransportVersion();
default boolean requestingImportance() {
return false;
}
String getResultsField();
boolean isAllocateOnly();
default boolean supportsIngestPipeline() {
return true;
}
default boolean supportsPipelineAggregation() {
return true;
}
default boolean supportsSearchRescorer() {
return false;
}
@Nullable
default TrainedModelInput getDefaultInput(TrainedModelType modelType) {
if (modelType == null) {
return null;
}
return modelType.getDefaultInput();
}
default ActionRequestValidationException validateTrainedModelInput(
TrainedModelInput input,
boolean forCreation,
ActionRequestValidationException validationException
) {
if (input != null && input.getFieldNames().isEmpty()) {
validationException = addValidationError("[input.field_names] must not be empty", validationException);
}
if (input != null
&& input.getFieldNames()
.stream()
.filter(s -> s.contains("."))
.flatMap(s -> Arrays.stream(Strings.delimitedListToStringArray(s, ".")))
.anyMatch(String::isEmpty)) {
validationException = addValidationError(
"[input.field_names] must only contain valid dot delimited field names",
validationException
);
}
return validationException;
}
default ElasticsearchStatusException incompatibleUpdateException(String updateName) {
throw ExceptionsHelper.badRequestException(
"Inference config of type [{}] can not be updated with a inference request of type [{}]",
getName(),
updateName
);
}
default License.OperationMode getMinLicenseSupported() {
return ML_API_FEATURE.getMinimumOperationMode();
}
default License.OperationMode getMinLicenseSupportedForAction(RestRequest.Method method) {
return getMinLicenseSupported();
}
}
| InferenceConfig |
java | google__dagger | javatests/dagger/hilt/processor/internal/aliasof/AliasOfProcessorTest.java | {
"start": 4232,
"end": 4941
} | interface ____{}");
HiltCompilerTests.hiltCompiler(scope).compile(subject -> subject.hasErrorCount(0));
}
@Rule public TemporaryFolder tempFolderRule = new TemporaryFolder();
@Test
public void fails_conflictingAliasScope() {
Source scope =
HiltCompilerTests.javaSource(
"test.AliasScope",
"package test;",
"",
"import javax.inject.Scope;",
"import javax.inject.Singleton;",
"import dagger.hilt.android.scopes.ActivityScoped;",
"import dagger.hilt.migration.AliasOf;",
"",
"@Scope",
"@AliasOf({Singleton.class, ActivityScoped.class})",
"public @ | AliasScope |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KGroupedTableImpl.java | {
"start": 1912,
"end": 2014
} | class ____ {@link KGroupedTable}.
*
* @param <K> the key type
* @param <V> the value type
*/
public | of |
java | elastic__elasticsearch | modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java | {
"start": 29379,
"end": 31470
} | class ____ implements RequestWrapper<DeleteRequest> {
private final DeleteRequest request;
DeleteRequestWrapper(DeleteRequest request) {
this.request = Objects.requireNonNull(request, "Wrapped DeleteRequest can not be null");
}
@Override
public void setIndex(String index) {
request.index(index);
}
@Override
public String getIndex() {
return request.index();
}
@Override
public void setId(String id) {
request.id(id);
}
@Override
public String getId() {
return request.id();
}
@Override
public void setVersion(long version) {
request.version(version);
}
@Override
public long getVersion() {
return request.version();
}
@Override
public void setVersionType(VersionType versionType) {
request.versionType(versionType);
}
@Override
public void setRouting(String routing) {
request.routing(routing);
}
@Override
public String getRouting() {
return request.routing();
}
@Override
public Map<String, Object> getSource() {
throw new UnsupportedOperationException("unable to get source from action request [" + request.getClass() + "]");
}
@Override
public void setSource(Map<String, Object> source) {
throw new UnsupportedOperationException("unable to set [source] on action request [" + request.getClass() + "]");
}
@Override
public DeleteRequest self() {
return request;
}
}
/**
* Wraps a {@link DeleteRequest} in a {@link RequestWrapper}
*/
public static RequestWrapper<DeleteRequest> wrap(DeleteRequest request) {
return new DeleteRequestWrapper(request);
}
/**
* Apply a {@link Script} to a {@link RequestWrapper}
*/
public abstract static | DeleteRequestWrapper |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/datastream/CustomSinkOperatorUidHashes.java | {
"start": 2690,
"end": 4576
} | class ____ {
@Nullable String writerUidHash = null;
@Nullable String committerUidHash = null;
@Nullable String globalCommitterUidHash = null;
/**
* Sets the uid hash of the writer operator used to recover state.
*
* @param writerUidHash uid hash denoting writer operator
* @return {@link SinkOperatorUidHashesBuilder}
*/
public SinkOperatorUidHashesBuilder setWriterUidHash(String writerUidHash) {
this.writerUidHash = writerUidHash;
return this;
}
/**
* Sets the uid hash of the committer operator used to recover state.
*
* @param committerUidHash uid hash denoting the committer operator
* @return {@link SinkOperatorUidHashesBuilder}
*/
public SinkOperatorUidHashesBuilder setCommitterUidHash(String committerUidHash) {
this.committerUidHash = committerUidHash;
return this;
}
/**
* Sets the uid hash of the global committer operator used to recover state.
*
* @param globalCommitterUidHash uid hash denoting the global committer operator
* @return {@link SinkOperatorUidHashesBuilder}
*/
public SinkOperatorUidHashesBuilder setGlobalCommitterUidHash(
String globalCommitterUidHash) {
this.globalCommitterUidHash = globalCommitterUidHash;
return this;
}
/**
* Constructs the {@link CustomSinkOperatorUidHashes} with the given uid hashes.
*
* @return {@link CustomSinkOperatorUidHashes}
*/
public CustomSinkOperatorUidHashes build() {
return new CustomSinkOperatorUidHashes(
writerUidHash, committerUidHash, globalCommitterUidHash);
}
}
}
| SinkOperatorUidHashesBuilder |
java | spring-projects__spring-security | config/src/integration-test/java/org/springframework/security/config/annotation/rsocket/SimpleAuthenticationITests.java | {
"start": 5057,
"end": 6093
} | class ____ {
@Bean
ServerController controller() {
return new ServerController();
}
@Bean
RSocketMessageHandler messageHandler() {
RSocketMessageHandler handler = new RSocketMessageHandler();
handler.setRSocketStrategies(rsocketStrategies());
return handler;
}
@Bean
RSocketStrategies rsocketStrategies() {
return RSocketStrategies.builder().encoder(new SimpleAuthenticationEncoder()).build();
}
@Bean
PayloadSocketAcceptorInterceptor rsocketInterceptor(RSocketSecurity rsocket) {
rsocket.authorizePayload((authorize) -> authorize.anyRequest().authenticated().anyExchange().permitAll())
.simpleAuthentication(Customizer.withDefaults());
return rsocket.build();
}
@Bean
MapReactiveUserDetailsService uds() {
// @formatter:off
UserDetails rob = User.withDefaultPasswordEncoder()
.username("rob")
.password("password")
.roles("USER", "ADMIN")
.build();
// @formatter:on
return new MapReactiveUserDetailsService(rob);
}
}
@Controller
static | Config |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoClient.java | {
"start": 6857,
"end": 7983
} | class ____ decode each document into
* @param <T> the target document type of the iterable.
* @param options the stream options
* @return the stream of change stream.
*/
<T> Multi<ChangeStreamDocument<T>> watch(List<? extends Bson> pipeline, Class<T> clazz,
ChangeStreamOptions options);
/**
* Creates a change stream for this client.
*
* @param clientSession the client session with which to associate this operation
* @return the stream of change stream.
*/
Multi<ChangeStreamDocument<Document>> watch(ClientSession clientSession);
/**
* Creates a change stream for this client.
*
* @param clientSession the client session with which to associate this operation
* @param options the stream options
* @return the stream of change stream.
*/
Multi<ChangeStreamDocument<Document>> watch(ClientSession clientSession, ChangeStreamOptions options);
/**
* Creates a change stream for this client.
*
* @param clientSession the client session with which to associate this operation
* @param clazz the | to |
java | quarkusio__quarkus | extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenAnnotationTest.java | {
"start": 1135,
"end": 6541
} | class ____ {
final static OidcTestClient client = new OidcTestClient();
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(DefaultClientDefaultExchange.class, DefaultClientEnabledExchange.class,
NamedClientDefaultExchange.class, MultiProviderFrontendResource.class, ProtectedResource.class,
CustomAccessTokenRequestFilter.class, NamedClientDefaultExchange_OnMethod.class,
DefaultClientEnabledExchange_OnMethod.class, DefaultClientDefaultExchange_OnMethod.class,
MultipleClientsAndMultipleMethods.class)
.addAsResource(
new StringAsset(
"""
quarkus.oidc.auth-server-url=${keycloak.url:replaced-by-test}/realms/quarkus
quarkus.oidc.client-id=quarkus-app
quarkus.oidc.credentials.secret=secret
quarkus.oidc-client.auth-server-url=${quarkus.oidc.auth-server-url}
quarkus.oidc-client.client-id=${quarkus.oidc.client-id}
quarkus.oidc-client.credentials.client-secret.value=${quarkus.oidc.credentials.secret}
quarkus.oidc-client.credentials.client-secret.method=post
quarkus.oidc-client.grant.type=jwt
quarkus.oidc-client.scopes=https://graph.microsoft.com/user.read,offline_access
quarkus.oidc-client.grant-options.jwt.requested_token_use=on_behalf_of
quarkus.oidc-client.token-path=${keycloak.url}/realms/quarkus/jwt-bearer-token
quarkus.oidc-client.named.auth-server-url=${quarkus.oidc-client.auth-server-url}
quarkus.oidc-client.named.client-id=${quarkus.oidc-client.client-id}
quarkus.oidc-client.named.credentials.client-secret.value=${quarkus.oidc-client.credentials.client-secret.value}
quarkus.oidc-client.named.credentials.client-secret.method=${quarkus.oidc-client.credentials.client-secret.method}
quarkus.oidc-client.named.grant.type=${quarkus.oidc-client.grant.type}
quarkus.oidc-client.named.scopes=${quarkus.oidc-client.scopes}
quarkus.oidc-client.named.grant-options.jwt.requested_token_use=${quarkus.oidc-client.grant-options.jwt.requested_token_use}
quarkus.oidc-client.named.token-path=${quarkus.oidc-client.token-path}
"""),
"application.properties"));
@AfterAll
public static void close() {
client.close();
}
@Test
public void testDefaultClientEnabledTokenExchange() {
testRestClientTokenPropagation(true, "defaultClientEnabledExchange");
testRestClientTokenPropagation(true, "defaultClientEnabledExchange_OnMethod");
testRestClientTokenPropagation(true, "multipleClientsAndMultipleMethods_DefaultClientEnabledExchange");
}
@Test
public void testDefaultClientDefaultTokenExchange() {
testRestClientTokenPropagation(false, "defaultClientDefaultExchange");
testRestClientTokenPropagation(false, "defaultClientDefaultExchange_OnMethod");
testRestClientTokenPropagation(false, "multipleClientsAndMultipleMethods_DefaultClientDefaultExchange");
}
@Test
public void testNamedClientDefaultTokenExchange() {
testRestClientTokenPropagation(true, "namedClientDefaultExchange");
testRestClientTokenPropagation(true, "namedClientDefaultExchange_OnMethod");
testRestClientTokenPropagation(true, "multipleClientsAndMultipleMethods_NamedClientDefaultExchange");
}
@Test
public void testNoTokenPropagation() {
RestAssured.given().auth().oauth2(getBearerAccessToken())
.queryParam("client-key", "multipleClientsAndMultipleMethods_NoAccessToken")
.when().get("/frontend/token-propagation")
.then()
.statusCode(401);
}
private void testRestClientTokenPropagation(boolean exchangeEnabled, String clientKey) {
String newTokenUsername = exchangeEnabled ? "bob" : "alice";
RestAssured.given().auth().oauth2(getBearerAccessToken())
.queryParam("client-key", clientKey)
.when().get("/frontend/token-propagation")
.then()
.statusCode(200)
.body(equalTo("original token username: alice new token username: " + newTokenUsername));
}
public String getBearerAccessToken() {
return client.getAccessToken("alice", "alice");
}
@RegisterRestClient(baseUri = "http://localhost:8081/protected")
@AccessToken
@Path("/")
public | AccessTokenAnnotationTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/STS2EndpointBuilderFactory.java | {
"start": 14008,
"end": 17516
} | interface ____
extends
EndpointProducerBuilder {
default STS2EndpointBuilder basic() {
return (STS2EndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedSTS2EndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedSTS2EndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* To use an existing configured AWS STS client.
*
* The option is a:
* <code>software.amazon.awssdk.services.sts.StsClient</code> type.
*
* Group: advanced
*
* @param stsClient the value to set
* @return the dsl builder
*/
default AdvancedSTS2EndpointBuilder stsClient(software.amazon.awssdk.services.sts.StsClient stsClient) {
doSetProperty("stsClient", stsClient);
return this;
}
/**
* To use an existing configured AWS STS client.
*
* The option will be converted to a
* <code>software.amazon.awssdk.services.sts.StsClient</code> type.
*
* Group: advanced
*
* @param stsClient the value to set
* @return the dsl builder
*/
default AdvancedSTS2EndpointBuilder stsClient(String stsClient) {
doSetProperty("stsClient", stsClient);
return this;
}
}
public | AdvancedSTS2EndpointBuilder |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ParsedTask.java | {
"start": 1088,
"end": 1265
} | class ____ {@link LoggedTask}. This provides also the
* extra information about the task obtained from job history which is not
* written to the JSON trace file.
*/
public | around |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/strictness/StrictnessMockAnnotationTest.java | {
"start": 695,
"end": 1492
} | class ____ {
public @Rule MockitoRule rule = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS);
@Mock(strictness = Mock.Strictness.LENIENT)
IMethods lenientMock;
@Mock IMethods regularMock;
@Test
public void mock_is_lenient() {
when(lenientMock.simpleMethod("1")).thenReturn("1");
// then lenient mock does not throw:
ProductionCode.simpleMethod(lenientMock, "3");
}
@Test
public void mock_is_strict() {
when(regularMock.simpleMethod("2")).thenReturn("2");
Assertions.assertThatThrownBy(() -> ProductionCode.simpleMethod(regularMock, "4"))
.isInstanceOf(PotentialStubbingProblem.class);
}
}
public static | StrictStubsTest |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/sftp/integration/SftpSimpleProduceThroughProxyIT.java | {
"start": 1634,
"end": 4618
} | class ____ extends SftpServerTestSupport {
private static HttpProxyServer proxyServer;
private final int proxyPort = AvailablePortFinder.getNextAvailable();
@BeforeEach
public void setupProxy() {
proxyServer = DefaultHttpProxyServer.bootstrap()
.withPort(proxyPort)
.withProxyAuthenticator(new ProxyAuthenticator() {
@Override
public boolean authenticate(String userName, String password) {
return "user".equals(userName) && "password".equals(password);
}
@Override
public String getRealm() {
return "myrealm";
}
}).start();
}
@AfterEach
public void cleanup() {
proxyServer.stop();
}
@Test
public void testSftpSimpleProduceThroughProxy() {
template.sendBodyAndHeader(
"sftp://localhost:{{ftp.server.port}}/{{ftp.root.dir}}"
+ "?username=admin&password=admin&proxy=#proxy",
"Hello World", Exchange.FILE_NAME,
"hello.txt");
File file = ftpFile("hello.txt").toFile();
assertTrue(file.exists(), "File should exist: " + file);
assertEquals("Hello World", context.getTypeConverter().convertTo(String.class, file));
}
@Test
public void testSftpSimpleSubPathProduceThroughProxy() {
template.sendBodyAndHeader(
"sftp://localhost:{{ftp.server.port}}/{{ftp.root.dir}}"
+ "/mysub?username=admin&password=admin&proxy=#proxy&knownHostsFile="
+ service.getKnownHostsFile(),
"Bye World", Exchange.FILE_NAME,
"bye.txt");
File file = ftpFile("mysub/bye.txt").toFile();
assertTrue(file.exists(), "File should exist: " + file);
assertEquals("Bye World", context.getTypeConverter().convertTo(String.class, file));
}
@Test
public void testSftpSimpleTwoSubPathProduceThroughProxy() {
template.sendBodyAndHeader("sftp://localhost:{{ftp.server.port}}/{{ftp.root.dir}}"
+ "/mysub/myother?username=admin&password=admin&proxy=#proxy&knownHostsFile="
+ service.getKnownHostsFile(),
"Farewell World",
Exchange.FILE_NAME, "farewell.txt");
File file = ftpFile("mysub/myother/farewell.txt").toFile();
assertTrue(file.exists(), "File should exist: " + file);
assertEquals("Farewell World", context.getTypeConverter().convertTo(String.class, file));
}
@BindToRegistry("proxy")
public ProxyHTTP createProxy() {
final ProxyHTTP proxyHTTP = new ProxyHTTP("localhost", proxyPort);
proxyHTTP.setUserPasswd("user", "password");
return proxyHTTP;
}
}
| SftpSimpleProduceThroughProxyIT |
java | apache__camel | core/camel-cloud/src/main/java/org/apache/camel/impl/cloud/ServiceRegistrationRoutePolicy.java | {
"start": 1554,
"end": 7073
} | class ____ extends RoutePolicySupport implements CamelContextAware {
private static final Logger LOGGER = LoggerFactory.getLogger(ServiceRegistrationRoutePolicy.class);
private final ServiceRegistry.Selector serviceRegistrySelector;
private ServiceRegistry serviceRegistry;
private CamelContext camelContext;
public ServiceRegistrationRoutePolicy() {
this(null, ServiceRegistrySelectors.DEFAULT_SELECTOR);
}
public ServiceRegistrationRoutePolicy(ServiceRegistry.Selector serviceRegistrySelector) {
this(null, serviceRegistrySelector);
}
public ServiceRegistrationRoutePolicy(ServiceRegistry serviceRegistry, ServiceRegistry.Selector serviceRegistrySelector) {
this.serviceRegistry = serviceRegistry;
this.serviceRegistrySelector = serviceRegistrySelector;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
// ***********************
// policy life-cycle
// ***********************
@Override
public void doStart() throws Exception {
if (serviceRegistry == null) {
serviceRegistry = ServiceRegistryHelper.lookupService(camelContext, serviceRegistrySelector).orElseThrow(
() -> new IllegalStateException("ServiceRegistry service not found"));
}
LOGGER.debug("ServiceRegistrationRoutePolicy {} is using ServiceRegistry instance {} (id={}, type={})",
this,
serviceRegistry,
serviceRegistry.getId(),
serviceRegistry.getClass().getName());
}
// ***********************
// route life-cycle
// ***********************
@Override
public void onStart(Route route) {
register(route);
}
@Override
public void onStop(Route route) {
deregister(route);
}
@Override
public void onSuspend(Route route) {
deregister(route);
}
@Override
public void onResume(Route route) {
register(route);
}
// ***********************
// registration helpers
// ***********************
private void register(Route route) {
computeServiceDefinition(route).ifPresent(serviceRegistry::register);
}
private void deregister(Route route) {
computeServiceDefinition(route).ifPresent(serviceRegistry::deregister);
}
private Optional<ServiceDefinition> computeServiceDefinition(Route route) {
final Endpoint endpoint = route.getConsumer().getEndpoint();
final Map<String, String> properties = new HashMap<>();
if (endpoint instanceof DiscoverableService service) {
// first load all the properties from the endpoint
properties.putAll(service.getServiceProperties());
}
// then add additional properties from route with ServiceDefinition.SERVICE_META_PREFIX,
// note that route defined properties may override DiscoverableService
// provided ones
for (Map.Entry<String, Object> entry : route.getProperties().entrySet()) {
if (!entry.getKey().startsWith(ServiceDefinition.SERVICE_META_PREFIX)) {
continue;
}
final String key = entry.getKey();
final String val = camelContext.getTypeConverter().convertTo(String.class, entry.getValue());
properties.put(key, val);
}
// try to get the service name from route properties
String serviceName = properties.get(ServiceDefinition.SERVICE_META_NAME);
if (serviceName == null) {
// if not check if the route group is defined use the route group
serviceName = route.getGroup();
if (serviceName != null) {
properties.put(ServiceDefinition.SERVICE_META_NAME, serviceName);
}
}
if (ObjectHelper.isEmpty(serviceName)) {
LOGGER.debug("Route {} has not enough information for service registration", route);
return Optional.empty();
}
// try to get the service id from route properties
String serviceId = properties.get(ServiceDefinition.SERVICE_META_ID);
if (serviceId == null) {
// if not check if the route id is custom and use it
boolean custom = "true".equals(route.getProperties().get(Route.CUSTOM_ID_PROPERTY));
if (custom) {
serviceId = route.getId();
}
if (serviceId != null) {
properties.put(ServiceDefinition.SERVICE_META_ID, serviceId);
}
}
if (serviceId == null) {
// finally auto generate the service id
serviceId = getCamelContext().getUuidGenerator().generateUuid();
}
final String serviceHost = properties.get(ServiceDefinition.SERVICE_META_HOST);
final String servicePort = properties.getOrDefault(ServiceDefinition.SERVICE_META_PORT, "-1");
// Build the final resource definition from bits collected from the
// endpoint and the route.
return Optional.of(
new DefaultServiceDefinition(
serviceId,
serviceName,
serviceHost,
Integer.parseInt(servicePort),
properties));
}
}
| ServiceRegistrationRoutePolicy |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java | {
"start": 2930,
"end": 11744
} | class ____ extends AbstractTransportGetResourcesAction<TransformConfig, Request, Response> {
private static final String DANGLING_TASK_ERROR_MESSAGE_FORMAT =
"Found task for transform [%s], but no configuration for it. To delete this transform use DELETE with force=true.";
private final ClusterService clusterService;
private final Client client;
@Inject
public TransportGetTransformAction(
TransportService transportService,
ActionFilters actionFilters,
ClusterService clusterService,
Client client,
NamedXContentRegistry xContentRegistry
) {
super(GetTransformAction.NAME, transportService, actionFilters, Request::new, client, xContentRegistry);
this.clusterService = clusterService;
this.client = client;
}
@Override
protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId());
final ClusterState clusterState = clusterService.state();
TransformNodes.warnIfNoTransformNodes(clusterState);
RemainingTime remainingTime = RemainingTime.from(Instant::now, request.timeout());
// Step 2: Search for all the transform tasks (matching the request) that *do not* have corresponding transform config.
ActionListener<QueryPage<TransformConfig>> searchTransformConfigsListener = listener.delegateFailureAndWrap((l, r) -> {
if (request.checkForDanglingTasks()) {
getAllTransformIds(request, r, remainingTime, l.delegateFailureAndWrap((ll, transformConfigIds) -> {
var errors = TransformTask.findTransformTasks(request.getId(), clusterState)
.stream()
.map(PersistentTasksCustomMetadata.PersistentTask::getId)
.filter(not(transformConfigIds::contains))
.map(
transformId -> new Response.Error(
"dangling_task",
Strings.format(DANGLING_TASK_ERROR_MESSAGE_FORMAT, transformId)
)
)
.toList();
ll.onResponse(new Response(r.results(), r.count(), errors.isEmpty() ? null : errors));
}));
} else {
l.onResponse(new Response(r.results(), r.count(), null));
}
});
// Step 1: Search for all the transform configs matching the request.
searchResources(request, parentTaskId, searchTransformConfigsListener);
}
@Override
protected ParseField getResultsField() {
return TransformField.TRANSFORMS;
}
@Override
protected String[] getIndices() {
return new String[] {
TransformInternalIndexConstants.INDEX_NAME_PATTERN,
TransformInternalIndexConstants.INDEX_NAME_PATTERN_DEPRECATED };
}
@Override
protected TransformConfig parse(XContentParser parser) {
return TransformConfig.fromXContent(parser, null, true);
}
@Override
protected ResourceNotFoundException notFoundException(String resourceId) {
return new ResourceNotFoundException(TransformMessages.getMessage(TransformMessages.REST_UNKNOWN_TRANSFORM, resourceId));
}
@Override
protected String executionOrigin() {
return TRANSFORM_ORIGIN;
}
@Override
protected String extractIdFromResource(TransformConfig transformConfig) {
return transformConfig.getId();
}
@Override
protected QueryBuilder additionalQuery() {
return QueryBuilders.termQuery(INDEX_DOC_TYPE.getPreferredName(), TransformConfig.NAME);
}
@Override
protected SearchSourceBuilder customSearchOptions(SearchSourceBuilder searchSourceBuilder) {
return searchSourceBuilder.sort("_index", SortOrder.DESC).sort(TransformField.ID.getPreferredName(), SortOrder.ASC);
}
private void getAllTransformIds(
Request request,
QueryPage<TransformConfig> initialResults,
RemainingTime remainingTime,
ActionListener<Set<String>> listener
) {
ActionListener<Stream<String>> transformIdListener = listener.map(stream -> stream.collect(toSet()));
var requestedPage = initialResults.results().stream().map(TransformConfig::getId);
if (initialResults.count() == initialResults.results().size()) {
transformIdListener.onResponse(requestedPage);
} else {
// if we do not have all of our transform ids already, we have to go get them
// we'll read everything after our current page, then we'll reverse and read everything before our current page
var from = request.getPageParams().getFrom();
var size = request.getPageParams().getSize();
var idTokens = ExpandedIdsMatcher.tokenizeExpression(request.getResourceId());
getAllTransformIds(idTokens, false, from, size, remainingTime, transformIdListener.delegateFailureAndWrap((l, nextPages) -> {
var currentPages = Stream.concat(requestedPage, nextPages);
getAllTransformIds(idTokens, true, from, size, remainingTime, l.map(firstPages -> Stream.concat(firstPages, currentPages)));
}));
}
}
private void getAllTransformIds(
String[] idTokens,
boolean reverse,
int from,
int size,
RemainingTime remainingTime,
ActionListener<Stream<String>> listener
) {
if (reverse && from <= 0) {
listener.onResponse(Stream.empty());
return;
}
var thisPage = reverse ? from - size : from + size;
var thisPageFrom = Math.max(0, thisPage);
var thisPageSize = thisPage < 0 ? from : size;
SearchRequest request = client.prepareSearch(
TransformInternalIndexConstants.INDEX_NAME_PATTERN,
TransformInternalIndexConstants.INDEX_NAME_PATTERN_DEPRECATED
)
.addSort(TransformField.ID.getPreferredName(), SortOrder.ASC)
.addSort("_index", SortOrder.DESC)
.setFrom(thisPageFrom)
.setSize(thisPageSize)
.setTimeout(remainingTime.get())
.setFetchSource(false)
.setTrackTotalHits(true)
.addDocValueField(TransformField.ID.getPreferredName())
.setQuery(query(idTokens))
.request();
executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
TRANSFORM_ORIGIN,
request,
listener.<SearchResponse>delegateFailureAndWrap((l, searchResponse) -> {
var transformIds = Arrays.stream(searchResponse.getHits().getHits())
.map(hit -> (String) hit.field(TransformField.ID.getPreferredName()).getValue())
.filter(Predicate.not(org.elasticsearch.common.Strings::isNullOrEmpty))
.toList()
.stream();
if (searchResponse.getHits().getHits().length == size) {
getAllTransformIds(
idTokens,
reverse,
thisPageFrom,
thisPageSize,
remainingTime,
l.map(nextTransformIds -> Stream.concat(transformIds, nextTransformIds))
);
} else {
l.onResponse(transformIds);
}
}),
client::search
);
}
private static QueryBuilder query(String[] idTokens) {
var queryBuilder = QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformConfig.NAME));
if (org.elasticsearch.common.Strings.isAllOrWildcard(idTokens) == false) {
var shouldQueries = new BoolQueryBuilder();
var terms = new ArrayList<String>();
for (String token : idTokens) {
if (Regex.isSimpleMatchPattern(token)) {
shouldQueries.should(QueryBuilders.wildcardQuery(TransformField.ID.getPreferredName(), token));
} else {
terms.add(token);
}
}
if (terms.isEmpty() == false) {
shouldQueries.should(QueryBuilders.termsQuery(TransformField.ID.getPreferredName(), terms));
}
if (shouldQueries.should().isEmpty() == false) {
queryBuilder.filter(shouldQueries);
}
}
return QueryBuilders.constantScoreQuery(queryBuilder);
}
}
| TransportGetTransformAction |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/support/jsse/AbstractJsseParametersTest.java | {
"start": 1385,
"end": 4931
} | class ____ extends TestSupport {
protected CamelContext createPropertiesPlaceholderAwareContext() throws Exception {
Properties supplementalProperties = new Properties();
KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
SecureRandom sr = null;
try {
sr = SecureRandom.getInstance("SHA1PRNG");
} catch (NoSuchAlgorithmException e) {
// Ignore
}
SSLContext sslc = SSLContext.getInstance("TLS");
sslc.init(null, null, null);
SSLSocket socket = (SSLSocket) sslc.getSocketFactory().createSocket();
supplementalProperties.setProperty("keyStoreParameters.type", KeyStore.getDefaultType());
supplementalProperties.setProperty("keyStoreParameters.provider", ks.getProvider().getName());
supplementalProperties.setProperty("keyManagersParameters.algorithm", KeyManagerFactory.getDefaultAlgorithm());
supplementalProperties.setProperty("keyManagersParameters.provider", kmf.getProvider().getName());
supplementalProperties.setProperty("trustManagersParameters.algorithm", TrustManagerFactory.getDefaultAlgorithm());
supplementalProperties.setProperty("trustManagersParameters.provider", tmf.getProvider().getName());
if (sr != null) {
supplementalProperties.setProperty("secureRandomParameters.algorithm", "SHA1PRNG");
supplementalProperties.setProperty("secureRandomParameters.provider", sr.getProvider().getName());
}
supplementalProperties.setProperty("sslContextParameters.provider", sslc.getProvider().getName());
supplementalProperties.setProperty("cipherSuite.0", socket.getSupportedCipherSuites()[0]);
// Have to skip this guy because he doesn't work with TLS as the
// SSLContext protocol
String ssp = "";
for (String protocol : socket.getSupportedProtocols()) {
if (!"SSLv2Hello".equals(protocol)) {
ssp = protocol;
break;
}
}
supplementalProperties.setProperty("secureSocketProtocol.0", ssp);
return this.createPropertiesPlaceholderAwareContext(supplementalProperties);
}
protected CamelContext createPropertiesPlaceholderAwareContext(Properties supplementalProperties) throws IOException {
Properties properties = new Properties(supplementalProperties);
properties.load(AbstractJsseParametersTest.class.getResourceAsStream("test.properties"));
if (supplementalProperties != null) {
Properties mergedProps = new Properties();
Set<String> keys = new HashSet<>();
keys.addAll(properties.stringPropertyNames());
keys.addAll(supplementalProperties.stringPropertyNames());
for (String key : keys) {
mergedProps.setProperty(key, properties.getProperty(key));
}
properties = mergedProps;
}
properties.store(new FileOutputStream("target/jsse-test.properties"),
"Generated by " + AbstractJsseParametersTest.class.getName());
CamelContext context = new DefaultCamelContext();
context.getPropertiesComponent().setLocation("file:./target/jsse-test.properties");
return context;
}
}
| AbstractJsseParametersTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/scalar/EltFunction.java | {
"start": 1176,
"end": 1648
} | class ____ extends BuiltInScalarFunction {
public EltFunction(SpecializedContext context) {
super(BuiltInFunctionDefinitions.ELT, context);
}
public @Nullable Object eval(@Nullable Number index, Object... exprs) {
if (index == null) {
return null;
}
long idx = index.longValue();
if (idx < 1 || idx > exprs.length) {
return null;
}
return exprs[(int) index - 1];
}
}
| EltFunction |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java | {
"start": 695,
"end": 2372
} | class ____ {
private double mean;
private double m2;
private long count;
public double mean() {
return mean;
}
public double m2() {
return m2;
}
public long count() {
return count;
}
public WelfordAlgorithm() {
this(0, 0, 0);
}
public WelfordAlgorithm(double mean, double m2, long count) {
this.mean = mean;
this.m2 = m2;
this.count = count;
}
public void add(int value) {
add((double) value);
}
public void add(long value) {
add((double) value);
}
public void add(double value) {
final double delta = value - mean;
count += 1;
mean += delta / count;
m2 += delta * (value - mean);
}
public void add(double meanValue, double m2Value, long countValue) {
if (countValue == 0) {
return;
}
if (count == 0) {
mean = meanValue;
m2 = m2Value;
count = countValue;
return;
}
double delta = mean - meanValue;
m2 += m2Value + delta * delta * count * countValue / (count + countValue);
mean = (mean * count + meanValue * countValue) / (count + countValue);
count += countValue;
}
/**
* Evaluate the variance or standard deviation.
* @param stdDev if true, compute standard deviation, otherwise variance
* @return
*/
public double evaluate(boolean stdDev) {
if (stdDev == false) {
return count < 2 ? 0 : m2 / count;
} else {
return count < 2 ? 0 : Math.sqrt(m2 / count);
}
}
}
| WelfordAlgorithm |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LdifEndpointBuilderFactory.java | {
"start": 4357,
"end": 6319
} | interface ____ {
/**
* LDIF (camel-ldif)
* Perform updates on an LDAP server from an LDIF body content.
*
* Category: database,security
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-ldif
*
* Syntax: <code>ldif:ldapConnectionName</code>
*
* Path parameter: ldapConnectionName (required)
* The name of the LdapConnection bean to pull from the registry. Note
* that this must be of scope prototype to avoid it being shared among
* threads or using a connection that has timed out.
*
* @param path ldapConnectionName
* @return the dsl builder
*/
default LdifEndpointBuilder ldif(String path) {
return LdifEndpointBuilderFactory.endpointBuilder("ldif", path);
}
/**
* LDIF (camel-ldif)
* Perform updates on an LDAP server from an LDIF body content.
*
* Category: database,security
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-ldif
*
* Syntax: <code>ldif:ldapConnectionName</code>
*
* Path parameter: ldapConnectionName (required)
* The name of the LdapConnection bean to pull from the registry. Note
* that this must be of scope prototype to avoid it being shared among
* threads or using a connection that has timed out.
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path ldapConnectionName
* @return the dsl builder
*/
default LdifEndpointBuilder ldif(String componentName, String path) {
return LdifEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static LdifEndpointBuilder endpointBuilder(String componentName, String path) {
| LdifBuilders |
java | bumptech__glide | annotation/src/main/java/com/bumptech/glide/annotation/GlideOption.java | {
"start": 2586,
"end": 2648
} | class ____ in JAR.
@Retention(RetentionPolicy.CLASS)
public @ | files |
java | quarkusio__quarkus | test-framework/common/src/main/java/io/quarkus/test/common/ArtifactLauncher.java | {
"start": 512,
"end": 1063
} | interface ____ {
int httpPort();
int httpsPort();
Duration waitTime();
String testProfile();
List<String> argLine();
/**
* Additional environment variables to be passed to the launched process.
* Note: When Quarkus launches the new process, it will always include the environment
* variables of the current process
*/
Map<String, String> env();
ArtifactLauncher.InitContext.DevServicesLaunchResult getDevServicesLaunchResult();
| InitContext |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RedisEndpointBuilderFactory.java | {
"start": 40385,
"end": 40697
} | class ____ extends AbstractEndpointBuilder implements RedisEndpointBuilder, AdvancedRedisEndpointBuilder {
public RedisEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new RedisEndpointBuilderImpl(path);
}
} | RedisEndpointBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/EnumAndColumnDefinitionTest.java | {
"start": 4355,
"end": 4738
} | enum ____ value is saved.
*/
@JdbcTypeCode(SqlTypes.VARCHAR)
AnotherMyEnum anotherMyEnum2;
String name;
public TestEntity() {
}
public TestEntity(Long id, MyEnum myEnum, AnotherMyEnum anotherMyEnum, AnotherMyEnum anotherMyEnum2) {
this.id = id;
this.myEnum = myEnum;
this.anotherMyEnum = anotherMyEnum;
this.anotherMyEnum2 = anotherMyEnum2;
}
}
}
| string |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/impl/SLF4JLocationAwareLog.java | {
"start": 939,
"end": 1173
} | interface ____
* delegates all processing to a wrapped {@link Logger org.slf4j.Logger}
* instance.
*
* <p>
* JCL's FATAL level is mapped to ERROR. All other levels map one to one.
*
* @author Ceki Gülcü
*/
public | which |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/AdapterMethodsTest.java | {
"start": 23103,
"end": 24415
} | class ____ {
@ToJson
void lineToJson(JsonWriter writer, Line line, @WithParens JsonAdapter<Point> pointAdapter)
throws IOException {
writer.beginArray();
pointAdapter.toJson(writer, line.a);
pointAdapter.toJson(writer, line.b);
writer.endArray();
}
@FromJson
Line lineFromJson(JsonReader reader, @WithParens JsonAdapter<Point> pointAdapter)
throws Exception {
reader.beginArray();
Point a = pointAdapter.fromJson(reader);
Point b = pointAdapter.fromJson(reader);
reader.endArray();
return new Line(a, b);
}
}
@Test
public void writerAndReaderTakingMultipleJsonAdapterParameters() throws Exception {
Moshi moshi =
new Moshi.Builder()
.add(new PointWriterAndReaderJsonAdapter())
.add(new PointWithParensJsonAdapter())
.add(new JsonAdapterWithWriterAndReaderTakingMultipleJsonAdapterParameters())
.build();
JsonAdapter<Line> lineAdapter = moshi.adapter(Line.class);
Line line = new Line(new Point(5, 8), new Point(3, 2));
assertThat(lineAdapter.toJson(line)).isEqualTo("[[5,8],\"(3 2)\"]");
assertThat(lineAdapter.fromJson("[[5,8],\"(3 2)\"]")).isEqualTo(line);
}
static | JsonAdapterWithWriterAndReaderTakingAnnotatedJsonAdapterParameter |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java | {
"start": 10763,
"end": 11784
} | class ____ extends MVEncoder {
protected Ints(int batchSize) {
super(batchSize);
}
/**
* Is there capacity for this many {@code int}s?
*/
protected final boolean hasCapacity(int count) {
return bytes.length() + count * Integer.BYTES <= bytesCapacity();
}
/**
* Make sure there is capacity for this many {@code int}s, growing
* the buffer if needed.
*/
protected final void ensureCapacity(int count) {
// TODO some protection against growing to gigabytes or whatever
bytes.grow(count * Integer.BYTES);
}
/**
* Encode an {@code int} into the current position and advance
* to the next position.
*/
protected final void encode(int v) {
addingValue();
intHandle.set(bytes.bytes(), bytes.length(), v);
bytes.setLength(bytes.length() + Integer.BYTES);
}
}
private static | Ints |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/spi/ClassTransformer.java | {
"start": 1401,
"end": 1720
} | class ____ that can be loaded
*/
@Override
byte[] transform(
@Nullable ClassLoader loader,
String className,
@Nullable Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) throws TransformerException;
void discoverTypes(ClassLoader loader, String className);
}
| file |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/InferenceMetadataFieldsMapper.java | {
"start": 1255,
"end": 2749
} | class ____ extends MetadataFieldMapper {
/**
* Internal index setting to control the format used for semantic text fields.
* Determines whether to use the legacy format (default: true).
* This setting is immutable and can only be defined at index creation
* to ensure the internal format remains consistent throughout the index's lifecycle.
*/
public static final Setting<Boolean> USE_LEGACY_SEMANTIC_TEXT_FORMAT = Setting.boolSetting(
"index.mapping.semantic_text.use_legacy_format",
false,
Setting.Property.Final,
Setting.Property.IndexScope,
Setting.Property.InternalIndex
);
// Check index version SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP because that index version was added in the same serverless promotion
// where the new format was enabled by default
public static final IndexVersion USE_NEW_SEMANTIC_TEXT_FORMAT_BY_DEFAULT = IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP;
public static final String NAME = "_inference_fields";
public static final String CONTENT_TYPE = "_inference_fields";
protected InferenceMetadataFieldsMapper(MappedFieldType inferenceFieldType) {
super(inferenceFieldType);
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public InferenceMetadataFieldType fieldType() {
return (InferenceMetadataFieldType) super.fieldType();
}
public abstract static | InferenceMetadataFieldsMapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/various/OneToOneOptimisticLockTest.java | {
"start": 934,
"end": 1845
} | class ____ {
public final static Integer PARENT_ID = 1;
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent parent = new Parent( PARENT_ID );
session.persist( parent );
}
);
}
@Test
public void testUpdateChildDoesNotIncrementParentVersion(SessionFactoryScope scope) {
Integer version = scope.fromTransaction(
session -> {
Parent parent = session.find( Parent.class, PARENT_ID );
Integer vers = parent.getVersion();
Child child = new Child( 2 );
parent.addChild( child );
session.persist( child );
return vers;
}
);
scope.inTransaction(
session -> {
Parent parent = session.find( Parent.class, PARENT_ID );
assertThat( parent.getVersion() ).isEqualTo( version );
}
);
}
@Entity(name = "Parent")
@Table(name = "PARENT_TABLE")
public static | OneToOneOptimisticLockTest |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/kv/CompactionLog.java | {
"start": 35119,
"end": 35693
} | class ____ implements CompactionAppendMsgCallback {
@Override
public AppendMessageResult doAppend(ByteBuffer bbDest, long fileFromOffset, int maxBlank, ByteBuffer bbSrc) {
ByteBuffer endInfo = ByteBuffer.allocate(END_FILE_MIN_BLANK_LENGTH);
endInfo.putInt(maxBlank);
endInfo.putInt(BLANK_MAGIC_CODE);
return new AppendMessageResult(AppendMessageStatus.END_OF_FILE,
fileFromOffset + bbDest.position(), maxBlank, System.currentTimeMillis());
}
}
static | CompactionAppendEndMsgCallback |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/RecursiveComparisonAssert.java | {
"start": 51726,
"end": 53993
} | class ____ {
* int number;
* String street;
*
* // only compares number, ouch!
* {@literal @}Override
* public boolean equals(final Object other) {
* if (!(other instanceof Address)) return false;
* Address castOther = (Address) other;
* return Objects.equals(number, castOther.number);
* }
* }
*
* Person sherlock = new Person("Sherlock", 1.80);
* sherlock.home.address.street = "Baker Street";
* sherlock.home.address.number = 221;
*
* Person sherlock2 = new Person("Sherlock", 1.80);
* sherlock2.home.address.street = "Butcher Street";
* sherlock2.home.address.number = 221;
*
* // assertion succeeds because overridden equals are used and thus street fields are mot compared
* assertThat(sherlock).usingRecursiveComparison()
* .usingOverriddenEquals()
* .isEqualTo(sherlock2);
*
* // ignoringOverriddenEqualsForFields force a recursive comparison on the field matching the regex
* // now this assertion fails as we expect since the home.address.street fields differ
* assertThat(sherlock).usingRecursiveComparison()
* .usingOverriddenEquals()
* .ignoringOverriddenEqualsForFieldsMatchingRegexes("home.*")
* .isEqualTo(sherlock2);</code></pre>
*
* @param regexes regexes used to specify the fields we want to force a recursive comparison on.
* @return this {@link RecursiveComparisonAssert} to chain other methods.
*/
@CheckReturnValue
public SELF ignoringOverriddenEqualsForFieldsMatchingRegexes(String... regexes) {
recursiveComparisonConfiguration.ignoreOverriddenEqualsForFieldsMatchingRegexes(regexes);
return myself;
}
/**
* Makes the recursive comparison to ignore collection order in all fields of the object under test.
* <p>
* <b>Important:</b> ignoring collection order has a high performance cost because each element of the actual collection must
* be compared to each element of the expected collection which is an O(n²) operation. For example with a collection of 100
* elements, the number of comparisons is 100x100 = 10 000!
* <p>
* Example:
* <pre><code class='java'> | Address |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/result/method/annotation/InitBinderBindingContext.java | {
"start": 1657,
"end": 4753
} | class ____ extends BindingContext {
private final List<SyncInvocableHandlerMethod> binderMethods;
private final BindingContext binderMethodContext;
private final SessionStatus sessionStatus = new SimpleSessionStatus();
private @Nullable Runnable saveModelOperation;
InitBinderBindingContext(
@Nullable WebBindingInitializer initializer, List<SyncInvocableHandlerMethod> binderMethods,
boolean methodValidationApplicable, ReactiveAdapterRegistry registry) {
super(initializer, registry);
this.binderMethods = binderMethods;
this.binderMethodContext = new BindingContext(initializer, registry);
setMethodValidationApplicable(methodValidationApplicable);
}
/**
* Return the {@link SessionStatus} instance to use that can be used to
* signal that session processing is complete.
*/
public SessionStatus getSessionStatus() {
return this.sessionStatus;
}
/**
* Returns an instance of {@link ExtendedWebExchangeDataBinder}.
* @since 6.2.1
*/
@Override
protected WebExchangeDataBinder createBinderInstance(@Nullable Object target, String name) {
return new ExtendedWebExchangeDataBinder(target, name);
}
@Override
protected WebExchangeDataBinder initDataBinder(WebExchangeDataBinder dataBinder, ServerWebExchange exchange) {
this.binderMethods.stream()
.filter(binderMethod -> {
InitBinder ann = binderMethod.getMethodAnnotation(InitBinder.class);
Assert.state(ann != null, "No InitBinder annotation");
String[] names = ann.value();
return (ObjectUtils.isEmpty(names) ||
ObjectUtils.containsElement(names, dataBinder.getObjectName()));
})
.forEach(method -> invokeBinderMethod(dataBinder, exchange, method));
return dataBinder;
}
private void invokeBinderMethod(
WebExchangeDataBinder dataBinder, ServerWebExchange exchange, SyncInvocableHandlerMethod binderMethod) {
HandlerResult result = binderMethod.invokeForHandlerResult(exchange, this.binderMethodContext, dataBinder);
if (result != null && result.getReturnValue() != null) {
throw new IllegalStateException(
"@InitBinder methods must not return a value (should be void): " + binderMethod);
}
// Should not happen (no Model argument resolution) ...
if (!this.binderMethodContext.getModel().asMap().isEmpty()) {
throw new IllegalStateException(
"@InitBinder methods are not allowed to add model attributes: " + binderMethod);
}
}
/**
* Provide the context required to promote model attributes listed as
* {@code @SessionAttributes} to the session during {@link #updateModel}.
*/
public void setSessionContext(SessionAttributesHandler attributesHandler, WebSession session) {
this.saveModelOperation = () -> {
if (getSessionStatus().isComplete()) {
attributesHandler.cleanupAttributes(session);
}
else {
attributesHandler.storeAttributes(session, getModel().asMap());
}
};
}
@Override
public void updateModel(ServerWebExchange exchange) {
if (this.saveModelOperation != null) {
this.saveModelOperation.run();
}
super.updateModel(exchange);
}
}
| InitBinderBindingContext |
java | quarkusio__quarkus | extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/ClassLevelCustomPermissionsAllowedTest.java | {
"start": 859,
"end": 1472
} | class ____ {
static final String WRITE_PERMISSION = "write";
static final String WRITE_PERMISSION_BEAN = "write:bean";
static final String READ_PERMISSION = "read";
static final String READ_PERMISSION_BEAN = "read:bean";
private final AuthData USER = new AuthData(Collections.singleton("user"), false, "user",
Set.of(createPermission("read", (String[]) null)));
private final AuthData ADMIN = new AuthData(Collections.singleton("admin"), false, "admin",
Set.of(createPermission("write", (String[]) null)));
// mechanism for | ClassLevelCustomPermissionsAllowedTest |
java | resilience4j__resilience4j | resilience4j-spring6/src/test/java/io/github/resilience4j/spring6/micrometer/configure/DefaultTimerTest.java | {
"start": 1819,
"end": 4046
} | class ____ {
@Autowired
private MeterRegistry meterRegistry;
@Autowired
private TimerRegistry timerRegistry;
@Autowired
private DefaultTimedService service;
@Test
public void shouldTimeBasicOperation() {
Timer timer = timerRegistry.timer(BASIC_OPERATION_TIMER_NAME);
String result1 = service.succeed(123);
thenSuccessTimed(meterRegistry, timer);
then(result1).isEqualTo("123");
try {
service.fail();
} catch (IllegalStateException e) {
thenFailureTimed(meterRegistry, timer, e);
}
String result2 = service.recover(123);
thenFailureTimed(meterRegistry, timer, new IllegalStateException());
then(result2).isEqualTo("Basic operation recovered 123");
Timer spelTimer = timerRegistry.timer(BASIC_OPERATION_TIMER_NAME + "SpEl");
String result3 = service.recover(spelTimer.getName(), 123);
thenFailureTimed(meterRegistry, spelTimer, new IllegalStateException());
then(result3).isEqualTo("Basic operation recovered 123");
}
@Test
public void shouldTimeCompletableStage() throws Throwable {
Timer timer = timerRegistry.timer(COMPLETABLE_STAGE_TIMER_NAME);
String result1 = service.succeedCompletionStage(123).toCompletableFuture().get();
thenSuccessTimed(meterRegistry, timer);
then(result1).isEqualTo("123");
try {
service.failCompletionStage().toCompletableFuture().get();
} catch (ExecutionException e) {
thenFailureTimed(meterRegistry, timer, e.getCause());
}
String result2 = service.recoverCompletionStage(123).toCompletableFuture().get();
thenFailureTimed(meterRegistry, timer, new IllegalStateException());
then(result2).isEqualTo("Completable stage recovered 123");
Timer spelTimer = timerRegistry.timer(COMPLETABLE_STAGE_TIMER_NAME + "SpEl");
String result3 = service.recoverCompletionStage(spelTimer.getName(), 123).toCompletableFuture().get();
thenFailureTimed(meterRegistry, spelTimer, new IllegalStateException());
then(result3).isEqualTo("Completable stage recovered 123");
}
}
| DefaultTimerTest |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/ContextPathIntegrationTests.java | {
"start": 2243,
"end": 4491
} | class ____ {
static Stream<Arguments> httpServers() {
return Stream.of(
argumentSet("Jetty", new JettyHttpServer()),
argumentSet("Jetty Core", new JettyCoreHttpServer()),
argumentSet("Reactor Netty", new ReactorHttpServer()),
argumentSet("Tomcat", new TomcatHttpServer())
);
}
@ParameterizedTest
@MethodSource("httpServers")
void multipleWebFluxApps(AbstractHttpServer server) throws Exception {
AnnotationConfigApplicationContext context1 = new AnnotationConfigApplicationContext(WebAppConfig.class);
AnnotationConfigApplicationContext context2 = new AnnotationConfigApplicationContext(WebAppConfig.class);
HttpHandler webApp1Handler = WebHttpHandlerBuilder.applicationContext(context1).build();
HttpHandler webApp2Handler = WebHttpHandlerBuilder.applicationContext(context2).build();
server.registerHttpHandler("/webApp1", webApp1Handler);
server.registerHttpHandler("/webApp2", webApp2Handler);
server.afterPropertiesSet();
server.start();
try {
RestTemplate restTemplate = new RestTemplate();
String actual;
String url = "http://localhost:" + server.getPort() + "/webApp1/test";
actual = restTemplate.getForObject(url, String.class);
assertThat(actual).isEqualTo("Tested in /webApp1");
url = "http://localhost:" + server.getPort() + "/webApp2/test";
actual = restTemplate.getForObject(url, String.class);
assertThat(actual).isEqualTo("Tested in /webApp2");
}
finally {
server.stop();
}
}
@Test
void servletPathMapping() throws Exception {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(WebAppConfig.class);
TomcatHttpServer server = new TomcatHttpServer();
server.setContextPath("/app");
server.setServletMapping("/api/*");
HttpHandler httpHandler = WebHttpHandlerBuilder.applicationContext(context).build();
server.setHandler(httpHandler);
server.afterPropertiesSet();
server.start();
try {
String url = "http://localhost:" + server.getPort() + "/app/api/test";
String actual = new RestTemplate().getForObject(url, String.class);
assertThat(actual).isEqualTo("Tested in /app/api");
}
finally {
server.stop();
}
}
@EnableWebFlux
@Configuration
static | ContextPathIntegrationTests |
java | google__guice | core/test/com/google/inject/spi/ElementsTest.java | {
"start": 51558,
"end": 51702
} | class ____ {
@Inject
@Named("foo")
@SampleAnnotation
String a;
C(@Named("bar") @SampleAnnotation Integer b) {}
}
private | C |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/mapping/DenormalizedTable.java | {
"start": 552,
"end": 4632
} | class ____ extends Table {
private final Table includedTable;
private List<Column> reorderedColumns;
public DenormalizedTable(
String contributor,
Namespace namespace,
Identifier physicalTableName,
boolean isAbstract,
Table includedTable) {
super( contributor, namespace, physicalTableName, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
public DenormalizedTable(
String contributor,
Namespace namespace,
Identifier physicalTableName,
String subselectFragment,
boolean isAbstract,
Table includedTable) {
super( contributor, namespace, physicalTableName, subselectFragment, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
public DenormalizedTable(
String contributor,
Namespace namespace,
String subselect,
boolean isAbstract,
Table includedTable) {
super( contributor, namespace, subselect, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
@Override
public void createForeignKeys(MetadataBuildingContext context) {
includedTable.createForeignKeys( context );
for ( var foreignKey : includedTable.getForeignKeyCollection() ) {
final var referencedClass =
foreignKey.resolveReferencedClass( context.getMetadataCollector() );
// the ForeignKeys created in the first pass did not have their referenced table initialized
if ( foreignKey.getReferencedTable() == null ) {
foreignKey.setReferencedTable( referencedClass.getTable() );
}
final var denormalizedForeignKey = createDenormalizedForeignKey( foreignKey );
createForeignKey(
context.getBuildingOptions()
.getImplicitNamingStrategy()
.determineForeignKeyName( new ForeignKeyNameSource( denormalizedForeignKey, this, context ) )
.render( context.getMetadataCollector().getDatabase().getDialect() ),
foreignKey.getColumns(),
foreignKey.getReferencedEntityName(),
foreignKey.getKeyDefinition(),
foreignKey.getOptions(),
foreignKey.getReferencedColumns()
);
}
}
private ForeignKey createDenormalizedForeignKey(ForeignKey includedTableFk) {
final var denormalizedForeignKey = new ForeignKey(this);
denormalizedForeignKey.setReferencedEntityName( includedTableFk.getReferencedEntityName() );
denormalizedForeignKey.setKeyDefinition( includedTableFk.getKeyDefinition() );
denormalizedForeignKey.setOptions( includedTableFk.getOptions() );
denormalizedForeignKey.setReferencedTable( includedTableFk.getReferencedTable() );
denormalizedForeignKey.addReferencedColumns( includedTableFk.getReferencedColumns() );
for ( var keyColumn : includedTableFk.getColumns() ) {
denormalizedForeignKey.addColumn( keyColumn );
}
return denormalizedForeignKey;
}
@Override
public Column getColumn(Column column) {
final var superColumn = super.getColumn( column );
return superColumn != null ? superColumn : includedTable.getColumn(column);
}
public Column getColumn(Identifier name) {
final var superColumn = super.getColumn( name );
return superColumn != null ? superColumn : includedTable.getColumn(name);
}
@Override
public Collection<Column> getColumns() {
if ( reorderedColumns != null ) {
return reorderedColumns;
}
return new JoinedList<>( new ArrayList<>( includedTable.getColumns() ), new ArrayList<>( super.getColumns() ) );
}
@Override
public boolean containsColumn(Column column) {
return super.containsColumn( column ) || includedTable.containsColumn( column );
}
@Override
public PrimaryKey getPrimaryKey() {
return includedTable.getPrimaryKey();
}
public Table getIncludedTable() {
return includedTable;
}
@Internal
@Override
public void reorderColumns(List<Column> columns) {
assert includedTable.getColumns().size() + super.getColumns().size() == columns.size()
&& columns.containsAll( includedTable.getColumns() )
&& columns.containsAll( super.getColumns() )
&& reorderedColumns == null;
this.reorderedColumns = columns;
}
}
| DenormalizedTable |
java | quarkusio__quarkus | extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/MessageBundleRecorder.java | {
"start": 1060,
"end": 1152
} | interface ____)
Map<String, Map<String, Class<?>>> getBundleInterfaces();
}
}
| class |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KinesisFirehose2EndpointBuilderFactory.java | {
"start": 22560,
"end": 24276
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final KinesisFirehose2HeaderNameBuilder INSTANCE = new KinesisFirehose2HeaderNameBuilder();
/**
* The record ID, as defined in
* http://docs.aws.amazon.com/firehose/latest/APIReference/API_PutRecord.html#API_PutRecord_ResponseSyntaxResponse Syntax.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsKinesisFirehoseRecordId}.
*/
public String awsKinesisFirehoseRecordId() {
return "CamelAwsKinesisFirehoseRecordId";
}
/**
* The operation we want to perform.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsKinesisFirehoseOperation}.
*/
public String awsKinesisFirehoseOperation() {
return "CamelAwsKinesisFirehoseOperation";
}
/**
* The name of the delivery stream.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code
* AwsKinesisFirehoseDeliveryStreamName}.
*/
public String awsKinesisFirehoseDeliveryStreamName() {
return "CamelAwsKinesisFirehoseDeliveryStreamName";
}
}
static KinesisFirehose2EndpointBuilder endpointBuilder(String componentName, String path) {
| KinesisFirehose2HeaderNameBuilder |
java | apache__camel | components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/Calculator.java | {
"start": 145239,
"end": 148139
} | class ____ extends org.apache.thrift.scheme.StandardScheme<calculate_result> {
@Override
public void read(org.apache.thrift.protocol.TProtocol iprot, calculate_result struct)
throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true) {
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.success = iprot.readI32();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 1: // OUCH
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.ouch = new InvalidOperation();
struct.ouch.read(iprot);
struct.setOuchIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
@Override
public void write(org.apache.thrift.protocol.TProtocol oprot, calculate_result struct)
throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.isSetSuccess()) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeI32(struct.success);
oprot.writeFieldEnd();
}
if (struct.ouch != null) {
oprot.writeFieldBegin(OUCH_FIELD_DESC);
struct.ouch.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static | calculate_resultStandardScheme |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/jdk/JDKDateDeserializers.java | {
"start": 115,
"end": 185
} | class ____ core pre-Java8 JDK date/time type deserializers.
*/
public | for |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/predicate/GatewayRequestPredicates.java | {
"start": 13217,
"end": 15126
} | class ____ implements RequestPredicate, ChangePathPatternParserVisitor.Target {
private PathPattern pattern;
HostPatternPredicate(PathPattern pattern) {
Objects.requireNonNull(pattern, "'pattern' must not be null");
this.pattern = pattern;
}
@Override
public boolean test(ServerRequest request) {
String host = request.headers().firstHeader(HttpHeaders.HOST);
if (host == null) {
host = "";
}
PathContainer pathContainer = PathContainer.parsePath(host, PathContainer.Options.MESSAGE_ROUTE);
PathPattern.PathMatchInfo info = this.pattern.matchAndExtract(pathContainer);
traceMatch("Pattern", this.pattern.getPatternString(), host, info != null);
if (info != null) {
MvcUtils.putUriTemplateVariables(request, info.getUriVariables());
return true;
}
else {
return false;
}
}
@Override
public Optional<ServerRequest> nest(ServerRequest request) {
throw new UnsupportedOperationException("nest is not supported");
// return
// Optional.ofNullable(this.pattern.matchStartOfPath(request.requestPath().pathWithinApplication()))
// .map(info -> new RequestPredicates.SubPathServerRequestWrapper(request,
// info, this.pattern));
}
@Override
public void accept(RequestPredicates.Visitor visitor) {
visitor.header(HttpHeaders.HOST, this.pattern.getPatternString());
}
@Override
public void changeParser(PathPatternParser parser) {
String patternString = this.pattern.getPatternString();
this.pattern = parser.parse(patternString);
}
@Override
public String toString() {
return this.pattern.getPatternString();
}
}
/**
* Implementation of {@link RouterFunctions.Visitor} that changes the
* {@link PathPatternParser} on path-related request predicates (i.e.
* {@code RequestPredicates.PathPatternPredicate}.
*
* @author Arjen Poutsma
* @since 5.3
*/
private static | HostPatternPredicate |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/quote/ColumnDefinitionQuotingTest.java | {
"start": 5901,
"end": 6114
} | class ____ {
@Id
@Column( columnDefinition = "`explicitly quoted`" )
private Integer id;
@ManyToOne
@JoinColumn( columnDefinition = "`explicitly quoted`" )
private E1 other;
}
@Entity
public static | E1 |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/ast/GenericPlaceholderElement.java | {
"start": 1299,
"end": 2295
} | interface ____ extends GenericElement {
/**
* Returns the bounds of this the generic placeholder empty. Always returns a non-empty list.
*
* @return The bounds declared for this type variable.
*/
@NonNull
List<? extends ClassElement> getBounds();
/**
* @return The name of the placeholder variable.
*/
@NonNull
String getVariableName();
/**
* @return The element declaring this variable, if it can be determined. Must be either a method or a class.
*/
Optional<Element> getDeclaringElement();
/**
* @return The required element declaring this variable, if it can be determined. Must be either a method or a class. Or throws an exception.
* @since 4.0.0
*/
@NonNull
default Element getRequiredDeclaringElement() {
return getDeclaringElement().orElseThrow(() -> new IllegalStateException("Declared element is not present!"));
}
/**
* In some cases the | GenericPlaceholderElement |
java | apache__camel | components/camel-mdc/src/test/java/org/apache/camel/mdc/MDCSelectedHeadersTest.java | {
"start": 1391,
"end": 3458
} | class ____ extends ExchangeTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(MDCAllPropertiesTest.class);
@Override
protected CamelContext createCamelContext() throws Exception {
MDCService mdcSvc = new MDCService();
mdcSvc.setCustomHeaders("head1,head2,head3");
CamelContext context = super.createCamelContext();
CamelContextAware.trySetCamelContext(mdcSvc, context);
mdcSvc.init(context);
return context;
}
@Test
void testRouteSingleRequest() throws IOException {
template.request("direct:start", null);
// We should get no MDC after the route has been executed
assertEquals(0, MDC.getCopyOfContextMap().size());
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.routeId("start")
.log("A message")
.setHeader("head1", simple("Header1"))
.setHeader("head2", simple("Header2"))
// head3 is missing on purpose!
.setHeader("head4", simple("Header4"))
.process(exchange -> {
LOG.info("A process");
assertNotNull(MDC.get(MDCService.MDC_MESSAGE_ID));
assertNotNull(MDC.get(MDCService.MDC_EXCHANGE_ID));
assertNotNull(MDC.get(MDCService.MDC_ROUTE_ID));
assertNotNull(MDC.get(MDCService.MDC_CAMEL_CONTEXT_ID));
assertEquals("Header1", MDC.get("head1"));
assertEquals("Header2", MDC.get("head2"));
assertNull(MDC.get("head3"));
assertNull(MDC.get("head4"));
})
.to("log:info");
}
};
}
}
| MDCSelectedHeadersTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SftpComponentBuilderFactory.java | {
"start": 1808,
"end": 6698
} | interface ____ extends ComponentBuilder<SftpComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default SftpComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default SftpComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default SftpComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Used for enabling or disabling all consumer based health checks from
* this component.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: health
*
* @param healthCheckConsumerEnabled the value to set
* @return the dsl builder
*/
default SftpComponentBuilder healthCheckConsumerEnabled(boolean healthCheckConsumerEnabled) {
doSetProperty("healthCheckConsumerEnabled", healthCheckConsumerEnabled);
return this;
}
/**
* Used for enabling or disabling all producer based health checks from
* this component. Notice: Camel has by default disabled all producer
* based health-checks. You can turn on producer checks globally by
* setting camel.health.producersEnabled=true.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: health
*
* @param healthCheckProducerEnabled the value to set
* @return the dsl builder
*/
default SftpComponentBuilder healthCheckProducerEnabled(boolean healthCheckProducerEnabled) {
doSetProperty("healthCheckProducerEnabled", healthCheckProducerEnabled);
return this;
}
}
| SftpComponentBuilder |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/UuidGeneratorBenchmark.java | {
"start": 1052,
"end": 1340
} | class ____ {
@Benchmark
public UUID base() {
return null;
}
@Benchmark
public UUID randomUUID() {
return UUID.randomUUID();
}
@Benchmark
public UUID timeBasedUUID() {
return UuidUtil.getTimeBasedUuid();
}
}
| UuidGeneratorBenchmark |
java | resilience4j__resilience4j | resilience4j-circularbuffer/src/main/java/io/github/resilience4j/circularbuffer/ConcurrentEvictingQueue.java | {
"start": 1974,
"end": 2042
} | class ____ thread-safe, and does NOT accept null elements.
*/
public | IS |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.