language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/Mailers.java | {
"start": 1273,
"end": 1466
} | class ____ a sort of producer for mailer instances.
* <p>
* It isn't a CDI producer in the literal sense, but it creates a synthetic bean
* from {@code MailerProcessor}.
*/
@Singleton
public | is |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/NotifyCheckpointAbortedITCase.java | {
"start": 21245,
"end": 21817
} | class ____ implements HighAvailabilityServicesFactory {
@Override
public HighAvailabilityServices createHAServices(
Configuration configuration, Executor executor) {
final CheckpointRecoveryFactory checkpointRecoveryFactory =
PerJobCheckpointRecoveryFactory.withoutCheckpointStoreRecovery(
maxCheckpoints -> new TestingCompletedCheckpointStore());
return new EmbeddedHaServicesWithLeadershipControl(executor, checkpointRecoveryFactory);
}
}
}
| TestingHAFactory |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableCombineLatest.java | {
"start": 14697,
"end": 14888
} | class ____ implements Function<T, R> {
@Override
public R apply(T t) throws Throwable {
return combiner.apply(new Object[] { t });
}
}
}
| SingletonArrayFunc |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/Output.java | {
"start": 1550,
"end": 1758
} | interface ____ can be used to emit elements and other messages, such as barriers and
* watermarks, from an operator.
*
* @param <T> The type of the elements that can be emitted.
*/
@PublicEvolving
public | that |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotShardStats.java | {
"start": 18844,
"end": 21327
} | class ____ implements Writeable, ToXContentObject {
protected final long count;
protected final long total;
protected final long min;
protected final long max;
public Counter(final long count, final long total, final long min, final long max) {
this.count = count;
this.total = total;
this.min = min;
this.max = max;
}
Counter(final StreamInput in) throws IOException {
this.count = in.readZLong();
this.total = in.readZLong();
this.min = in.readZLong();
this.max = in.readZLong();
}
public Counter add(Counter counter) {
return new Counter(count + counter.count, total + counter.total, Math.min(min, counter.min), Math.max(max, counter.max));
}
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeZLong(count);
out.writeZLong(total);
out.writeZLong(min);
out.writeZLong(max);
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.field("count", count);
builder.field("sum", total);
builder.field("min", min);
builder.field("max", max);
innerToXContent(builder, params);
}
builder.endObject();
return builder;
}
void innerToXContent(XContentBuilder builder, Params params) throws IOException {}
public long getCount() {
return count;
}
public long getTotal() {
return total;
}
public long getMin() {
return min;
}
public long getMax() {
return max;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
Counter that = (Counter) other;
return count == that.count && total == that.total && min == that.min && max == that.max;
}
@Override
public int hashCode() {
return Objects.hash(count, total, min, max);
}
}
public static | Counter |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/BeanPropertyRowMapper.java | {
"start": 3251,
"end": 3327
} | class ____<T> implements Function<Readable, T> {
/** The | BeanPropertyRowMapper |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetSubClustersRequestPBImpl.java | {
"start": 1263,
"end": 1344
} | class ____ responsible for get subclusters requests.
*/
@Private
@Unstable
public | is |
java | quarkusio__quarkus | extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/MultipleDataSourcesAndMSSQLPoolCreatorsTest.java | {
"start": 3097,
"end": 3528
} | class ____ implements MSSQLPoolCreator {
@Override
public Pool create(Input input) {
assertEquals(55555, input.msSQLConnectOptions().getPort()); // validate that the bean has been called for the proper datasource
return Pool.pool(input.vertx(), input.msSQLConnectOptions().setHost("localhost").setPort(1435),
input.poolOptions());
}
}
}
| HibernateMSSQLPoolCreator |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilterInitializer.java | {
"start": 1431,
"end": 2569
} | class ____ extends FilterInitializer {
private String configPrefix;
public AuthFilterInitializer() {
this.configPrefix = "hadoop.http.authentication.";
}
protected Map<String, String> createFilterConfig(Configuration conf) {
Map<String, String> filterConfig = AuthenticationFilterInitializer
.getFilterConfigMap(conf, configPrefix);
for (Map.Entry<String, String> entry : conf.getPropsWithPrefix(
ProxyUsers.CONF_HADOOP_PROXYUSER).entrySet()) {
filterConfig.put("proxyuser" + entry.getKey(), entry.getValue());
}
if (filterConfig.get("type") == null) {
filterConfig.put("type", UserGroupInformation.isSecurityEnabled() ?
KerberosAuthenticationHandler.TYPE :
PseudoAuthenticationHandler.TYPE);
}
//set cookie path
filterConfig.put("cookie.path", "/");
return filterConfig;
}
@Override
public void initFilter(FilterContainer container, Configuration conf) {
Map<String, String> filterConfig = createFilterConfig(conf);
container.addFilter("AuthFilter", AuthFilter.class.getName(),
filterConfig);
}
} | AuthFilterInitializer |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/support/PropertySourcesPlaceholderConfigurer.java | {
"start": 9712,
"end": 11667
} | class ____ extends PropertySource<ConfigurableEnvironment> {
ConfigurableEnvironmentPropertySource(ConfigurableEnvironment environment) {
super(ENVIRONMENT_PROPERTIES_PROPERTY_SOURCE_NAME, environment);
}
@Override
public boolean containsProperty(String name) {
for (PropertySource<?> propertySource : super.source.getPropertySources()) {
if (propertySource.containsProperty(name)) {
return true;
}
}
return false;
}
@Override
// Declare String as covariant return type, since a String is actually required.
public @Nullable String getProperty(String name) {
for (PropertySource<?> propertySource : super.source.getPropertySources()) {
Object candidate = propertySource.getProperty(name);
if (candidate != null) {
return convertToString(candidate);
}
}
return null;
}
/**
* Convert the supplied value to a {@link String} using the {@link ConversionService}
* from the {@link Environment}.
* <p>This is a modified version of
* {@link org.springframework.core.env.AbstractPropertyResolver#convertValueIfNecessary(Object, Class)}.
* @param value the value to convert
* @return the converted value, or the original value if no conversion is necessary
* @since 6.2.8
*/
private @Nullable String convertToString(Object value) {
if (value instanceof String string) {
return string;
}
return super.source.getConversionService().convert(value, String.class);
}
@Override
public String toString() {
return "ConfigurableEnvironmentPropertySource {propertySources=" + super.source.getPropertySources() + "}";
}
}
/**
* Fallback {@link PropertySource} that delegates to a raw {@link Environment}.
* <p>Should never apply in a regular scenario, since the {@code Environment}
* in an {@code ApplicationContext} should always be a {@link ConfigurableEnvironment}.
* @since 6.2.7
*/
private static | ConfigurableEnvironmentPropertySource |
java | elastic__elasticsearch | x-pack/qa/third-party/jira/src/yamlRestTest/java/org/elasticsearch/smoketest/WatcherJiraYamlTestSuiteIT.java | {
"start": 966,
"end": 3078
} | class ____ extends ESClientYamlSuiteTestCase {
public WatcherJiraYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Before
public void startWatcher() throws Exception {
final List<String> watcherTemplates = List.of(WatcherIndexTemplateRegistryField.TEMPLATE_NAMES_NO_ILM);
assertBusy(() -> {
try {
getAdminExecutionContext().callApi("watcher.start", Map.of(), List.of(), Map.of());
for (String template : watcherTemplates) {
ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi(
"indices.exists_template",
Map.of("name", template),
List.of(),
Map.of()
);
assertThat(templateExistsResponse.getStatusCode(), is(200));
}
ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
String state = response.evaluate("stats.0.watcher_state");
assertThat(state, is("started"));
} catch (IOException e) {
throw new AssertionError(e);
}
});
}
@After
public void stopWatcher() throws Exception {
assertBusy(() -> {
try {
getAdminExecutionContext().callApi("watcher.stop", Map.of(), List.of(), Map.of());
ClientYamlTestResponse response = getAdminExecutionContext().callApi("watcher.stats", Map.of(), List.of(), Map.of());
String state = response.evaluate("stats.0.watcher_state");
assertThat(state, is("stopped"));
} catch (IOException e) {
throw new AssertionError(e);
}
}, 60, TimeUnit.SECONDS);
}
}
| WatcherJiraYamlTestSuiteIT |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/jdk/TypedArraySerTest.java | {
"start": 1734,
"end": 1909
} | class ____ implements A {
public int value = 2;
}
@JsonTypeInfo(use=JsonTypeInfo.Id.NAME, include=JsonTypeInfo.As.PROPERTY)
@JsonTypeName("bean")
static | B |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java | {
"start": 1157,
"end": 6340
} | class ____ {
private final JobConf conf;
private final int totalMemoryAllowed;
private AtomicInteger totalMemoryUsed = new AtomicInteger();
private static final Logger LOG = LoggerFactory.getLogger(IndexCache.class);
private final ConcurrentHashMap<String,IndexInformation> cache =
new ConcurrentHashMap<String,IndexInformation>();
private final LinkedBlockingQueue<String> queue =
new LinkedBlockingQueue<String>();
public IndexCache(JobConf conf) {
this.conf = conf;
totalMemoryAllowed =
conf.getInt(MRJobConfig.SHUFFLE_INDEX_CACHE, 10) * 1024 * 1024;
LOG.info("IndexCache created with max memory = " + totalMemoryAllowed);
}
/**
* This method gets the index information for the given mapId and reduce.
* It reads the index file into cache if it is not already present.
* @param mapId
* @param reduce
* @param fileName The file to read the index information from if it is not
* already present in the cache
* @param expectedIndexOwner The expected owner of the index file
* @return The Index Information
* @throws IOException
*/
public IndexRecord getIndexInformation(String mapId, int reduce,
Path fileName, String expectedIndexOwner)
throws IOException {
IndexInformation info = cache.get(mapId);
if (info == null) {
info = readIndexFileToCache(fileName, mapId, expectedIndexOwner);
} else {
synchronized(info) {
while (isUnderConstruction(info)) {
try {
info.wait();
} catch (InterruptedException e) {
throw new IOException("Interrupted waiting for construction", e);
}
}
}
LOG.debug("IndexCache HIT: MapId " + mapId + " found");
}
if (info.mapSpillRecord.size() == 0 ||
info.mapSpillRecord.size() <= reduce) {
throw new IOException("Invalid request " +
" Map Id = " + mapId + " Reducer = " + reduce +
" Index Info Length = " + info.mapSpillRecord.size());
}
return info.mapSpillRecord.getIndex(reduce);
}
private boolean isUnderConstruction(IndexInformation info) {
synchronized(info) {
return (null == info.mapSpillRecord);
}
}
private IndexInformation readIndexFileToCache(Path indexFileName,
String mapId,
String expectedIndexOwner)
throws IOException {
IndexInformation info;
IndexInformation newInd = new IndexInformation();
if ((info = cache.putIfAbsent(mapId, newInd)) != null) {
synchronized(info) {
while (isUnderConstruction(info)) {
try {
info.wait();
} catch (InterruptedException e) {
throw new IOException("Interrupted waiting for construction", e);
}
}
}
LOG.debug("IndexCache HIT: MapId " + mapId + " found");
return info;
}
LOG.debug("IndexCache MISS: MapId " + mapId + " not found") ;
SpillRecord tmp = null;
try {
tmp = new SpillRecord(indexFileName, conf, expectedIndexOwner);
} catch (Throwable e) {
tmp = new SpillRecord(0);
cache.remove(mapId);
throw new IOException("Error Reading IndexFile", e);
} finally {
synchronized (newInd) {
newInd.mapSpillRecord = tmp;
newInd.notifyAll();
}
}
queue.add(mapId);
if (totalMemoryUsed.addAndGet(newInd.getSize()) > totalMemoryAllowed) {
freeIndexInformation();
}
return newInd;
}
/**
* This method removes the map from the cache if index information for this
* map is loaded(size>0), index information entry in cache will not be
* removed if it is in the loading phrase(size=0), this prevents corruption
* of totalMemoryUsed. It should be called when a map output on this tracker
* is discarded.
* @param mapId The taskID of this map.
*/
public void removeMap(String mapId) {
IndexInformation info = cache.get(mapId);
if (info == null || isUnderConstruction(info)) {
return;
}
info = cache.remove(mapId);
if (info != null) {
totalMemoryUsed.addAndGet(-info.getSize());
if (!queue.remove(mapId)) {
LOG.warn("Map ID" + mapId + " not found in queue!!");
}
} else {
LOG.info("Map ID " + mapId + " not found in cache");
}
}
/**
* This method checks if cache and totolMemoryUsed is consistent.
* It is only used for unit test.
* @return True if cache and totolMemoryUsed is consistent
*/
boolean checkTotalMemoryUsed() {
int totalSize = 0;
for (IndexInformation info : cache.values()) {
totalSize += info.getSize();
}
return totalSize == totalMemoryUsed.get();
}
/**
* Bring memory usage below totalMemoryAllowed.
*/
private synchronized void freeIndexInformation() {
while (totalMemoryUsed.get() > totalMemoryAllowed) {
String s = queue.remove();
IndexInformation info = cache.remove(s);
if (info != null) {
totalMemoryUsed.addAndGet(-info.getSize());
}
}
}
private static | IndexCache |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/Startable.java | {
"start": 76,
"end": 239
} | interface ____ extends Closeable {
void start();
String getConnectionInfo();
// This starts to couple to containers, so we could move it to sub- | Startable |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportAnnotationDetectionTests.java | {
"start": 3841,
"end": 3951
} | class ____ {
@Bean
TestBean testBean2() {
return new TestBean("2");
}
}
@Configuration
static | Config2 |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/OnFailureRestartPolicy.java | {
"start": 1092,
"end": 2843
} | class ____ implements ComponentRestartPolicy {
private static OnFailureRestartPolicy INSTANCE = new OnFailureRestartPolicy();
private OnFailureRestartPolicy() {
}
public static OnFailureRestartPolicy getInstance() {
return INSTANCE;
}
@Override public boolean isLongLived() {
return false;
}
@Override public boolean hasCompleted(Component component) {
if (hasCompletedSuccessfully(component)) {
return true;
}
return false;
}
@Override public boolean hasCompletedSuccessfully(Component component) {
if (component.getNumSucceededInstances() == component
.getNumDesiredInstances()) {
return true;
}
return false;
}
@Override public boolean shouldRelaunchInstance(
ComponentInstance componentInstance, ContainerStatus containerStatus) {
if (ComponentInstance.hasContainerFailed(containerStatus)) {
return true;
}
return false;
}
@Override public boolean isReadyForDownStream(Component dependentComponent) {
if (dependentComponent.getNumReadyInstances()
+ dependentComponent.getNumSucceededInstances()
+ dependentComponent.getNumFailedInstances()
< dependentComponent.getNumDesiredInstances()) {
return false;
}
return true;
}
@Override public boolean allowUpgrades() {
return false;
}
@Override public boolean shouldTerminate(Component component) {
long nSucceeded = component.getNumSucceededInstances();
if (nSucceeded < component.getComponentSpec().getNumberOfContainers()) {
return false;
}
return true;
}
@Override public boolean allowContainerRetriesForInstance(
ComponentInstance componentInstance) {
return true;
}
}
| OnFailureRestartPolicy |
java | apache__camel | components/camel-box/camel-box-component/src/generated/java/org/apache/camel/component/box/internal/BoxApiCollection.java | {
"start": 1317,
"end": 10438
} | class ____ extends ApiCollection<BoxApiName, BoxConfiguration> {
private BoxApiCollection() {
final Map<String, String> aliases = new HashMap<>();
final Map<BoxApiName, ApiMethodHelper<? extends ApiMethod>> apiHelpers = new EnumMap<>(BoxApiName.class);
final Map<Class<? extends ApiMethod>, BoxApiName> apiMethods = new HashMap<>();
List<String> nullableArgs;
aliases.clear();
aliases.put("addFolderCollaboration", "add");
aliases.put("addFolderCollaborationByEmail", "addByEmail");
aliases.put("deleteCollaboration", "delete");
aliases.put("getFolderCollaborations", "collaborations");
aliases.put("getPendingCollaborations", "pendingCollaborations");
aliases.put("getCollaborationInfo", "info");
aliases.put("updateCollaborationInfo", "updateInfo");
nullableArgs = Arrays.asList();
apiHelpers.put(BoxApiName.COLLABORATIONS, new ApiMethodHelper<>(BoxCollaborationsManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxCollaborationsManagerApiMethod.class, BoxApiName.COLLABORATIONS);
aliases.clear();
aliases.put("addFileComment", "add");
aliases.put("changeCommentMessage", "updateMessage");
aliases.put("deleteComment", "delete");
aliases.put("getCommentInfo", "info");
aliases.put("getFileComments", "comments");
aliases.put("replyToComment", "reply");
nullableArgs = Arrays.asList();
apiHelpers.put(BoxApiName.COMMENTS, new ApiMethodHelper<>(BoxCommentsManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxCommentsManagerApiMethod.class, BoxApiName.COMMENTS);
aliases.clear();
aliases.put("getEnterpriseEvents", "events");
nullableArgs = Arrays.asList("position", "types");
apiHelpers.put(BoxApiName.EVENT_LOGS, new ApiMethodHelper<>(BoxEventLogsManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxEventLogsManagerApiMethod.class, BoxApiName.EVENT_LOGS);
aliases.clear();
aliases.put("uploadFile", "upload");
aliases.put("downloadFile", "download");
aliases.put("copyFile", "copy");
aliases.put("moveFile", "move");
aliases.put("renameFile", "rename");
aliases.put("createFileSharedLink", "link");
aliases.put("deleteFile", "delete");
aliases.put("uploadNewFileVersion", "uploadVersion");
aliases.put("promoteFileVersion", "promoteVersion");
aliases.put("getFileVersions", "versions");
aliases.put("downloadPreviousFileVersion", "downloadVersion");
aliases.put("deleteFileVersion", "deleteVersion");
aliases.put("getFileInfo", "info");
aliases.put("updateFileInfo", "updateInfo");
aliases.put("createFileMetadata", "createMetadata");
aliases.put("getFileMetadata", "metadata");
aliases.put("updateFileMetadata", "updateMetadata");
aliases.put("deleteFileMetadata", "deleteMetadata");
aliases.put("getDownloadUrl", "url");
aliases.put("getPreviewLink", "preview");
aliases.put("getFileThumbnail", "thumbnail");
aliases.put("checkUpload", "canUpload");
nullableArgs = Arrays.asList("fields", "created", "modified", "size", "fileSize", "rangeStart", "rangeEnd", "listener", "fileSize", "newName", "unshareDate", "permissions", "typeName", "check");
apiHelpers.put(BoxApiName.FILES, new ApiMethodHelper<>(BoxFilesManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxFilesManagerApiMethod.class, BoxApiName.FILES);
aliases.clear();
aliases.put("getRootFolder", "root");
aliases.put("createFolder", "create");
aliases.put("copyFolder", "copy");
aliases.put("moveFolder", "move");
aliases.put("renameFolder", "rename");
aliases.put("createFolderSharedLink", "link");
aliases.put("deleteFolder", "delete");
aliases.put("getFolder", "folder");
aliases.put("getFolderInfo", "info");
aliases.put("getFolderItems", "items");
aliases.put("updateFolderInfo", "updateInfo");
nullableArgs = Arrays.asList("offset", "limit", "fields", "newName");
apiHelpers.put(BoxApiName.FOLDERS, new ApiMethodHelper<>(BoxFoldersManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxFoldersManagerApiMethod.class, BoxApiName.FOLDERS);
aliases.clear();
aliases.put("createGroup", "create");
aliases.put("deleteGroup", "delete");
aliases.put("getAllGroups", "groups");
aliases.put("getGroupInfo", "info");
aliases.put("addGroupMembership", "addMembership");
aliases.put("deleteGroupMembership", "deleteMembership");
aliases.put("getGroupMemberships", "memberships");
aliases.put("getGroupMembershipInfo", "membershipInfo");
aliases.put("updateGroupMembershipInfo", "updateMembershipInfo");
nullableArgs = Arrays.asList("role", "description", "provenance", "externalSyncIdentifier", "invitabilityLevel", "memberViewabilityLevel");
apiHelpers.put(BoxApiName.GROUPS, new ApiMethodHelper<>(BoxGroupsManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxGroupsManagerApiMethod.class, BoxApiName.GROUPS);
aliases.clear();
nullableArgs = Arrays.asList("startingPosition");
apiHelpers.put(BoxApiName.EVENTS, new ApiMethodHelper<>(BoxEventsManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxEventsManagerApiMethod.class, BoxApiName.EVENTS);
aliases.clear();
aliases.put("searchFolder", "search");
nullableArgs = Arrays.asList();
apiHelpers.put(BoxApiName.SEARCH, new ApiMethodHelper<>(BoxSearchManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxSearchManagerApiMethod.class, BoxApiName.SEARCH);
aliases.clear();
aliases.put("addFileTask", "add");
aliases.put("deleteTask", "delete");
aliases.put("getFileTasks", "tasks");
aliases.put("getTaskInfo", "info");
aliases.put("updateTaskInfo", "updateInfo");
aliases.put("addAssignmentToTask", "addAssignment");
aliases.put("deleteTaskAssignment", "deleteAssignment");
aliases.put("getTaskAssignments", "assignments");
aliases.put("getTaskAssignmentInfo", "assignmentInfo");
nullableArgs = Arrays.asList("message");
apiHelpers.put(BoxApiName.TASKS, new ApiMethodHelper<>(BoxTasksManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxTasksManagerApiMethod.class, BoxApiName.TASKS);
aliases.clear();
aliases.put("getCurrentUser", "currentUser");
aliases.put("getAllEnterpriseOrExternalUsers", "users");
aliases.put("createAppUser", "create");
aliases.put("createEnterpriseUser", "create");
aliases.put("deleteUser", "delete");
aliases.put("addUserEmailAlias", "addEmailAlias");
aliases.put("getUserEmailAlias", "emailAlias");
aliases.put("deleteUserEmailAlias", "deleteEmailAlias");
aliases.put("getUserInfo", "info");
aliases.put("updateUserInfo", "updateInfo");
nullableArgs = Arrays.asList("filterTerm", "fields", "params");
apiHelpers.put(BoxApiName.USERS, new ApiMethodHelper<>(BoxUsersManagerApiMethod.class, aliases, nullableArgs));
apiMethods.put(BoxUsersManagerApiMethod.class, BoxApiName.USERS);
setApiHelpers(apiHelpers);
setApiMethods(apiMethods);
}
public BoxConfiguration getEndpointConfiguration(BoxApiName apiName) {
BoxConfiguration result = null;
switch (apiName) {
case COLLABORATIONS:
result = new BoxCollaborationsManagerEndpointConfiguration();
break;
case COMMENTS:
result = new BoxCommentsManagerEndpointConfiguration();
break;
case EVENT_LOGS:
result = new BoxEventLogsManagerEndpointConfiguration();
break;
case FILES:
result = new BoxFilesManagerEndpointConfiguration();
break;
case FOLDERS:
result = new BoxFoldersManagerEndpointConfiguration();
break;
case GROUPS:
result = new BoxGroupsManagerEndpointConfiguration();
break;
case EVENTS:
result = new BoxEventsManagerEndpointConfiguration();
break;
case SEARCH:
result = new BoxSearchManagerEndpointConfiguration();
break;
case TASKS:
result = new BoxTasksManagerEndpointConfiguration();
break;
case USERS:
result = new BoxUsersManagerEndpointConfiguration();
break;
}
return result;
}
public static BoxApiCollection getCollection() {
return BoxApiCollectionHolder.INSTANCE;
}
private static final | BoxApiCollection |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/test/ListStreamingAdapter.java | {
"start": 504,
"end": 1010
} | class ____<T>
implements KeyStreamingChannel<T>, ValueStreamingChannel<T>, ScoredValueStreamingChannel<T> {
private final List<T> list = new Vector<>();
@Override
public void onKey(T key) {
list.add(key);
}
@Override
public void onValue(T value) {
list.add(value);
}
public List<T> getList() {
return list;
}
@Override
public void onValue(ScoredValue<T> value) {
list.add(value.getValue());
}
}
| ListStreamingAdapter |
java | micronaut-projects__micronaut-core | function/src/main/java/io/micronaut/function/executor/DefaultFunctionExitHandler.java | {
"start": 772,
"end": 1164
} | class ____ implements FunctionExitHandler {
@Override
public void exitWithError(Exception error, boolean debug) {
FunctionApplication.exitWithError(debug, error);
}
@Override
public void exitWithSuccess() {
System.exit(0);
}
@Override
public void exitWithNoData() {
FunctionApplication.exitWithNoData();
}
}
| DefaultFunctionExitHandler |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/metrics/JobMetricsHandlerTestBase.java | {
"start": 1130,
"end": 1802
} | class ____ extends MetricsHandlerTestBase<JobMetricsHandler> {
private static final String TEST_JOB_ID = new JobID().toString();
@Override
JobMetricsHandler getMetricsHandler() {
return new JobMetricsHandler(leaderRetriever, TIMEOUT, TEST_HEADERS, mockMetricFetcher);
}
@Override
QueryScopeInfo getQueryScopeInfo() {
return new QueryScopeInfo.JobQueryScopeInfo(TEST_JOB_ID);
}
@Override
Map<String, String> getPathParameters() {
Map<String, String> pathParameters = new HashMap<>();
pathParameters.put(JobIDPathParameter.KEY, TEST_JOB_ID);
return pathParameters;
}
}
| JobMetricsHandlerTestBase |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider2.java | {
"start": 9145,
"end": 37727
} | class ____ of T as a Class<T>
Class<F> factoryRawType = (Class<F>) (Class<?>) factoryType.getRawType();
try {
if (!factoryRawType.isInterface()) {
throw errors.addMessage("%s must be an interface.", factoryRawType).toException();
}
Multimap<String, Method> defaultMethods = HashMultimap.create();
Multimap<String, Method> otherMethods = HashMultimap.create();
ImmutableMap.Builder<Method, AssistData> assistDataBuilder = ImmutableMap.builder();
// TODO: also grab methods from superinterfaces
for (Method method : factoryRawType.getMethods()) {
// Skip static methods
if (Modifier.isStatic(method.getModifiers())) {
continue;
}
// Skip default methods that java8 may have created.
if (isDefault(method) && (method.isBridge() || method.isSynthetic())) {
// Even synthetic default methods need the return type validation...
// unavoidable consequence of javac8. :-(
validateFactoryReturnType(errors, method.getReturnType(), factoryRawType);
defaultMethods.put(method.getName(), method);
continue;
}
otherMethods.put(method.getName(), method);
TypeLiteral<?> returnTypeLiteral = factoryType.getReturnType(method);
Key<?> returnType;
try {
returnType =
Annotations.getKey(returnTypeLiteral, method, method.getAnnotations(), errors);
} catch (ConfigurationException ce) {
// If this was an error due to returnTypeLiteral not being specified, rephrase
// it as our factory not being specified, so it makes more sense to users.
if (isTypeNotSpecified(returnTypeLiteral, ce)) {
throw errors.keyNotFullySpecified(TypeLiteral.get(factoryRawType)).toException();
} else {
throw ce;
}
}
validateFactoryReturnType(errors, returnType.getTypeLiteral().getRawType(), factoryRawType);
List<TypeLiteral<?>> params = factoryType.getParameterTypes(method);
Annotation[][] paramAnnotations = method.getParameterAnnotations();
int p = 0;
List<Key<?>> keys = Lists.newArrayList();
for (TypeLiteral<?> param : params) {
Key<?> paramKey = Annotations.getKey(param, method, paramAnnotations[p++], errors);
Class<?> underlylingType = paramKey.getTypeLiteral().getRawType();
if (underlylingType.equals(Provider.class)
|| underlylingType.equals(jakarta.inject.Provider.class)) {
errors.addMessage(
"A Provider may not be a type in a factory method of an AssistedInject."
+ "\n Offending instance is parameter [%s] with key [%s] on method [%s]",
p, paramKey, method);
}
keys.add(assistKey(method, paramKey, errors));
}
ImmutableList<Key<?>> immutableParamList = ImmutableList.copyOf(keys);
// try to match up the method to the constructor
TypeLiteral<?> implementation = collector.getBindings().get(returnType);
if (implementation == null) {
implementation = returnType.getTypeLiteral();
}
Class<? extends Annotation> scope =
Annotations.findScopeAnnotation(errors, implementation.getRawType());
if (scope != null) {
errors.addMessage(
"Found scope annotation [%s] on implementation class "
+ "[%s] of AssistedInject factory [%s].\nThis is not allowed, please"
+ " remove the scope annotation.",
scope, implementation.getRawType(), factoryType);
}
InjectionPoint ctorInjectionPoint;
try {
ctorInjectionPoint =
findMatchingConstructorInjectionPoint(
method, returnType, implementation, immutableParamList);
} catch (ErrorsException ee) {
errors.merge(ee.getErrors());
continue;
}
Constructor<?> constructor = (Constructor<?>) ctorInjectionPoint.getMember();
List<ThreadLocalProvider> providers = Collections.emptyList();
Set<Dependency<?>> deps = getDependencies(ctorInjectionPoint, implementation);
boolean optimized = false;
// Now go through all dependencies of the implementation and see if it is OK to
// use an optimized form of assistedinject2. The optimized form requires that
// all injections directly inject the object itself (and not a Provider of the object,
// or an Injector), because it caches a single child injector and mutates the Provider
// of the arguments in a ThreadLocal.
if (isValidForOptimizedAssistedInject(deps, implementation.getRawType(), factoryType)) {
ImmutableList.Builder<ThreadLocalProvider> providerListBuilder = ImmutableList.builder();
for (int i = 0; i < params.size(); i++) {
providerListBuilder.add(new ThreadLocalProvider());
}
providers = providerListBuilder.build();
optimized = true;
}
AssistData data =
new AssistData(
constructor,
returnType,
immutableParamList,
implementation,
method,
removeAssistedDeps(deps),
optimized,
providers);
assistDataBuilder.put(method, data);
}
factory =
factoryRawType.cast(
Proxy.newProxyInstance(
factoryRawType.getClassLoader(), new Class<?>[] {factoryRawType}, this));
// Now go back through default methods. Try to use MethodHandles to make things
// work. If that doesn't work, fallback to trying to find compatible method
// signatures.
Map<Method, AssistData> dataSoFar = assistDataBuilder.buildOrThrow();
ImmutableMap.Builder<Method, MethodHandle> methodHandleBuilder = ImmutableMap.builder();
boolean warnedAboutUserLookups = false;
for (Map.Entry<String, Method> entry : defaultMethods.entries()) {
if (!warnedAboutUserLookups
&& userLookups == null
&& !Modifier.isPublic(factory.getClass().getModifiers())) {
warnedAboutUserLookups = true;
logger.log(
Level.WARNING,
"AssistedInject factory {0} is non-public and has javac-generated default methods. "
+ " Please pass a `MethodHandles.lookup()` with"
+ " FactoryModuleBuilder.withLookups when using this factory so that Guice can"
+ " properly call the default methods. Guice will try to workaround this, but "
+ "it does not always work (depending on the method signatures of the factory).",
new Object[] {factoryType});
}
// Note: If the user didn't supply a valid lookup, we always try to fallback to the hacky
// signature comparing workaround below.
// This is because all these shenanigans are only necessary because we're implementing
// AssistedInject through a Proxy. If we were to generate a subclass (which we theoretically
// _could_ do), then we wouldn't inadvertantly proxy the javac-generated default methods
// too (and end up with a stack overflow from infinite recursion).
// As such, we try our hardest to "make things work" requiring requiring extra effort from
// the user.
Method defaultMethod = entry.getValue();
MethodHandle handle = null;
try {
handle =
superMethodHandle(
SuperMethodSupport.METHOD_LOOKUP, defaultMethod, factory, userLookups);
} catch (ReflectiveOperationException e1) {
// If the user-specified lookup failed, try again w/ the private lookup hack.
// If _that_ doesn't work, try the below workaround.
if (allowPrivateLookupFallback
&& SuperMethodSupport.METHOD_LOOKUP != SuperMethodLookup.PRIVATE_LOOKUP) {
try {
handle =
superMethodHandle(
SuperMethodLookup.PRIVATE_LOOKUP, defaultMethod, factory, userLookups);
} catch (ReflectiveOperationException e2) {
// ignored, use below workaround.
}
}
}
Supplier<String> failureMsg =
() ->
"Unable to use non-public factory "
+ factoryRawType.getName()
+ ". Please call"
+ " FactoryModuleBuilder.withLookups(MethodHandles.lookup()) (with a"
+ " lookups that has access to the factory), or make the factory"
+ " public.";
if (handle != null) {
methodHandleBuilder.put(defaultMethod, handle);
} else if (!allowMethodHandleWorkaround) {
errors.addMessage(failureMsg.get());
} else {
boolean foundMatch = false;
for (Method otherMethod : otherMethods.get(defaultMethod.getName())) {
if (dataSoFar.containsKey(otherMethod) && isCompatible(defaultMethod, otherMethod)) {
if (foundMatch) {
errors.addMessage(failureMsg.get());
break;
} else {
assistDataBuilder.put(defaultMethod, dataSoFar.get(otherMethod));
foundMatch = true;
}
}
}
// We always expect to find at least one match, because we only deal with javac-generated
// default methods. If we ever allow user-specified default methods, this will need to
// change.
if (!foundMatch) {
throw new IllegalStateException("Can't find method compatible with: " + defaultMethod);
}
}
}
// If we generated any errors (from finding matching constructors, for instance), throw an
// exception.
if (errors.hasErrors()) {
throw errors.toException();
}
assistDataByMethod = assistDataBuilder.buildOrThrow();
methodHandleByMethod = methodHandleBuilder.buildOrThrow();
} catch (ErrorsException e) {
throw new ConfigurationException(e.getErrors().getMessages());
}
}
static boolean isDefault(Method method) {
// Per the javadoc, default methods are non-abstract, public, non-static.
// They're also in interfaces, but we can guarantee that already since we only act
// on interfaces.
return (method.getModifiers() & (Modifier.ABSTRACT | Modifier.PUBLIC | Modifier.STATIC))
== Modifier.PUBLIC;
}
private boolean isCompatible(Method src, Method dst) {
if (!src.getReturnType().isAssignableFrom(dst.getReturnType())) {
return false;
}
Class<?>[] srcParams = src.getParameterTypes();
Class<?>[] dstParams = dst.getParameterTypes();
if (srcParams.length != dstParams.length) {
return false;
}
for (int i = 0; i < srcParams.length; i++) {
if (!srcParams[i].isAssignableFrom(dstParams[i])) {
return false;
}
}
return true;
}
  /** Returns the single dynamic proxy instance that implements the factory interface. */
  @Override
  public F get() {
    return factory;
  }
@Override
public Set<Dependency<?>> getDependencies() {
Set<Dependency<?>> combinedDeps = new HashSet<>();
for (AssistData data : assistDataByMethod.values()) {
combinedDeps.addAll(data.dependencies);
}
return ImmutableSet.copyOf(combinedDeps);
}
  /** Returns the binding key under which this factory is registered. */
  @Override
  public Key<F> getKey() {
    return factoryKey;
  }
  /** Exposes per-method metadata to the assistedinject SPI. */
  // Safe cast because values are typed to AssistedData, which is an AssistedMethod, and
  // the collection is immutable.
  @Override
  @SuppressWarnings("unchecked")
  public Collection<AssistedMethod> getAssistedMethods() {
    return (Collection<AssistedMethod>) (Collection<?>) assistDataByMethod.values();
  }
  /**
   * Routes extension-aware visitors to the assistedinject-specific visit; all other visitors fall
   * back to the plain provider-instance-binding visit.
   */
  @Override
  @SuppressWarnings("unchecked")
  public <T, V> V acceptExtensionVisitor(
      BindingTargetVisitor<T, V> visitor, ProviderInstanceBinding<? extends T> binding) {
    if (visitor instanceof AssistedInjectTargetVisitor) {
      return ((AssistedInjectTargetVisitor<T, V>) visitor).visit((AssistedInjectBinding<T>) this);
    }
    return visitor.visit(binding);
  }
private void validateFactoryReturnType(Errors errors, Class<?> returnType, Class<?> factoryType) {
if (Modifier.isPublic(factoryType.getModifiers())
&& !Modifier.isPublic(returnType.getModifiers())) {
errors.addMessage(
"%s is public, but has a method that returns a non-public type: %s. "
+ "Due to limitations with java.lang.reflect.Proxy, this is not allowed. "
+ "Please either make the factory non-public or the return type public.",
factoryType, returnType);
}
}
/**
* Returns true if the ConfigurationException is due to an error of TypeLiteral not being fully
* specified.
*/
private boolean isTypeNotSpecified(TypeLiteral<?> typeLiteral, ConfigurationException ce) {
Collection<Message> messages = ce.getErrorMessages();
if (messages.size() == 1) {
Message msg =
Iterables.getOnlyElement(new Errors().keyNotFullySpecified(typeLiteral).getMessages());
return msg.getMessage().equals(Iterables.getOnlyElement(messages).getMessage());
} else {
return false;
}
}
  /**
   * Finds a constructor suitable for the method. If the implementation contained any constructors
   * marked with {@link AssistedInject}, this requires all {@link Assisted} parameters to exactly
   * match the parameters (in any order) listed in the method. Otherwise, if no {@link
   * AssistedInject} constructors exist, this will default to looking for an {@literal @}{@link
   * Inject} constructor.
   *
   * @param method the factory method being bound
   * @param returnType the key the factory method returns
   * @param implementation the concrete type to construct
   * @param paramList the assisted keys derived from the factory method's parameters
   * @throws ErrorsException if no usable constructor exists or the type cannot be constructed
   */
  private <T> InjectionPoint findMatchingConstructorInjectionPoint(
      Method method, Key<?> returnType, TypeLiteral<T> implementation, List<Key<?>> paramList)
      throws ErrorsException {
    Errors errors = new Errors(method);
    // Avoid repeating the same source twice when the factory returns the implementation directly.
    if (returnType.getTypeLiteral().equals(implementation)) {
      errors = errors.withSource(implementation);
    } else {
      errors = errors.withSource(returnType).withSource(implementation);
    }
    Class<?> rawType = implementation.getRawType();
    // Interfaces, abstract classes and (non-static) inner classes can never be constructed here.
    if (Modifier.isInterface(rawType.getModifiers())) {
      errors.addMessage(
          "%s is an interface, not a concrete class. Unable to create AssistedInject factory.",
          implementation);
      throw errors.toException();
    } else if (Modifier.isAbstract(rawType.getModifiers())) {
      errors.addMessage(
          "%s is abstract, not a concrete class. Unable to create AssistedInject factory.",
          implementation);
      throw errors.toException();
    } else if (Classes.isInnerClass(rawType)) {
      errors.cannotInjectInnerClass(rawType);
      throw errors.toException();
    }
    Constructor<?> matchingConstructor = null;
    boolean anyAssistedInjectConstructors = false;
    // Look for AssistedInject constructors...
    for (Constructor<?> constructor : rawType.getDeclaredConstructors()) {
      if (constructor.isAnnotationPresent(AssistedInject.class)) {
        anyAssistedInjectConstructors = true;
        if (constructorHasMatchingParams(implementation, constructor, paramList, errors)) {
          // More than one matching @AssistedInject constructor is ambiguous; fail fast.
          if (matchingConstructor != null) {
            errors.addMessage(
                "%s has more than one constructor annotated with @AssistedInject"
                    + " that matches the parameters in method %s. Unable to create "
                    + "AssistedInject factory.",
                implementation, method);
            throw errors.toException();
          } else {
            matchingConstructor = constructor;
          }
        }
      }
    }
    if (!anyAssistedInjectConstructors) {
      // If none existed, use @Inject or a no-arg constructor.
      try {
        return InjectionPoint.forConstructorOf(implementation);
      } catch (ConfigurationException e) {
        errors.merge(e.getErrorMessages());
        throw errors.toException();
      }
    } else {
      // Otherwise, use it or fail with a good error message.
      if (matchingConstructor != null) {
        // safe because we got the constructor from this implementation.
        @SuppressWarnings("unchecked")
        InjectionPoint ip =
            InjectionPoint.forConstructor(
                (Constructor<? super T>) matchingConstructor, implementation);
        return ip;
      } else {
        errors.addMessage(
            "%s has @AssistedInject constructors, but none of them match the"
                + " parameters in method %s. Unable to create AssistedInject factory.",
            implementation, method);
        throw errors.toException();
      }
    }
  }
/**
* Matching logic for constructors annotated with AssistedInject. This returns true if and only if
* all @Assisted parameters in the constructor exactly match (in any order) all @Assisted
* parameters the method's parameter.
*/
private boolean constructorHasMatchingParams(
TypeLiteral<?> type, Constructor<?> constructor, List<Key<?>> paramList, Errors errors)
throws ErrorsException {
List<TypeLiteral<?>> params = type.getParameterTypes(constructor);
Annotation[][] paramAnnotations = constructor.getParameterAnnotations();
int p = 0;
List<Key<?>> constructorKeys = Lists.newArrayList();
for (TypeLiteral<?> param : params) {
Key<?> paramKey = Annotations.getKey(param, constructor, paramAnnotations[p++], errors);
constructorKeys.add(paramKey);
}
// Require that every key exist in the constructor to match up exactly.
for (Key<?> key : paramList) {
// If it didn't exist in the constructor set, we can't use it.
if (!constructorKeys.remove(key)) {
return false;
}
}
// If any keys remain and their annotation is Assisted, we can't use it.
for (Key<?> key : constructorKeys) {
if (key.getAnnotationType() == Assisted.class) {
return false;
}
}
// All @Assisted params match up to the method's parameters.
return true;
}
/** Calculates all dependencies required by the implementation and constructor. */
private Set<Dependency<?>> getDependencies(
InjectionPoint ctorPoint, TypeLiteral<?> implementation) {
ImmutableSet.Builder<Dependency<?>> builder = ImmutableSet.builder();
builder.addAll(ctorPoint.getDependencies());
if (!implementation.getRawType().isInterface()) {
for (InjectionPoint ip : InjectionPoint.forInstanceMethodsAndFields(implementation)) {
builder.addAll(ip.getDependencies());
}
}
return builder.build();
}
/** Return all non-assisted dependencies. */
private Set<Dependency<?>> removeAssistedDeps(Set<Dependency<?>> deps) {
ImmutableSet.Builder<Dependency<?>> builder = ImmutableSet.builder();
for (Dependency<?> dep : deps) {
Class<?> annotationType = dep.getKey().getAnnotationType();
if (annotationType == null || !annotationType.equals(Assisted.class)) {
builder.add(dep);
}
}
return builder.build();
}
/**
* Returns true if all dependencies are suitable for the optimized version of AssistedInject. The
* optimized version caches the binding and uses a ThreadLocal Provider, so can only be applied if
* the assisted bindings are immediately provided. This looks for hints that the values may be
* lazily retrieved, by looking for injections of Injector or a Provider for the assisted values.
*/
private boolean isValidForOptimizedAssistedInject(
Set<Dependency<?>> dependencies, Class<?> implementation, TypeLiteral<?> factoryType) {
Set<Dependency<?>> badDeps = null; // optimization: create lazily
for (Dependency<?> dep : dependencies) {
if (isInjectorOrAssistedProvider(dep)) {
if (badDeps == null) {
badDeps = Sets.newHashSet();
}
badDeps.add(dep);
}
}
if (badDeps != null && !badDeps.isEmpty()) {
logger.log(
Level.WARNING,
"AssistedInject factory {0} will be slow "
+ "because {1} has assisted Provider dependencies or injects the Injector. "
+ "Stop injecting @Assisted Provider<T> (instead use @Assisted T) "
+ "or Injector to speed things up. (It will be a ~6500% speed bump!) "
+ "The exact offending deps are: {2}",
new Object[] {factoryType, implementation, badDeps});
return false;
}
return true;
}
/**
* Returns true if the dependency is for {@link Injector} or if the dependency is a {@link
* Provider} for a parameter that is {@literal @}{@link Assisted}.
*/
private boolean isInjectorOrAssistedProvider(Dependency<?> dependency) {
Class<?> annotationType = dependency.getKey().getAnnotationType();
if (annotationType != null && annotationType.equals(Assisted.class)) { // If it's assisted..
if (dependency
.getKey()
.getTypeLiteral()
.getRawType()
.equals(Provider.class)) { // And a Provider...
return true;
}
} else if (dependency
.getKey()
.getTypeLiteral()
.getRawType()
.equals(Injector.class)) { // If it's the Injector...
return true;
}
return false;
}
/**
* Returns a key similar to {@code key}, but with an {@literal @}Assisted binding annotation. This
* fails if another binding annotation is clobbered in the process. If the key already has the
* {@literal @}Assisted annotation, it is returned as-is to preserve any String value.
*/
private <T> Key<T> assistKey(Method method, Key<T> key, Errors errors) throws ErrorsException {
if (key.getAnnotationType() == null) {
return key.withAnnotation(DEFAULT_ANNOTATION);
} else if (key.getAnnotationType() == Assisted.class) {
return key;
} else {
errors
.withSource(method)
.addMessage(
"Only @Assisted is allowed for factory parameters, but found @%s",
key.getAnnotationType());
throw errors.toException();
}
}
  /**
   * At injector-creation time, we initialize the invocation handler. At this time we make sure all
   * factory methods will be able to build the target types.
   *
   * <p>Called by Guice via {@literal @}Inject; {@literal @}Toolable keeps it running in tool
   * stage. Throws {@link ConfigurationException} if the factory is reused across injectors, and
   * fails eagerly if any factory method's binding cannot be constructed.
   */
  @Inject
  @Toolable
  void initialize(Injector injector) {
    // A factory instance is tied to exactly one injector; reuse would mix bindings.
    if (this.injector != null) {
      throw new ConfigurationException(
          ImmutableList.of(
              new Message(
                  FactoryProvider2.class,
                  "Factories.create() factories may only be used in one Injector!")));
    }
    this.injector = injector;
    // Eagerly validate every factory method by building its child-injector binding once.
    for (Map.Entry<Method, AssistData> entry : assistDataByMethod.entrySet()) {
      Method method = entry.getKey();
      AssistData data = entry.getValue();
      Object[] args;
      if (!data.optimized) {
        // Non-optimized path binds the argument values directly, so placeholders are needed.
        args = new Object[method.getParameterTypes().length];
        Arrays.fill(args, "dummy object for validating Factories");
      } else {
        args = null; // won't be used -- instead will bind to data.providers.
      }
      getBindingFromNewInjector(
          method, args, data); // throws if the binding isn't properly configured
    }
  }
  /**
   * Creates a child injector that binds the args, and returns the binding for the method's result.
   *
   * <p>In the optimized mode the assisted parameters are bound to ThreadLocal providers so the
   * resulting binding can be cached and reused; otherwise the concrete {@code args} values are
   * bound directly and a fresh child injector is built per invocation.
   */
  public Binding<?> getBindingFromNewInjector(
      final Method method, final Object[] args, final AssistData data) {
    checkState(
        injector != null,
        "Factories.create() factories cannot be used until they're initialized by Guice.");
    final Key<?> returnType = data.returnType;
    // We ignore any pre-existing binding annotation.
    final Key<?> returnKey = Key.get(returnType.getTypeLiteral(), RETURN_ANNOTATION);
    Module assistedModule =
        new AbstractModule() {
          @Override
          @SuppressWarnings({
            "unchecked",
            "rawtypes"
          }) // raw keys are necessary for the args array and return value
          protected void configure() {
            Binder binder = binder().withSource(method);
            int p = 0;
            if (!data.optimized) {
              for (Key<?> paramKey : data.paramTypes) {
                // Wrap in a Provider to cover null, and to prevent Guice from injecting the
                // parameter
                binder.bind((Key) paramKey).toProvider(Providers.of(args[p++]));
              }
            } else {
              for (Key<?> paramKey : data.paramTypes) {
                // Bind to our ThreadLocalProviders.
                binder.bind((Key) paramKey).toProvider(data.providers.get(p++));
              }
            }
            Constructor constructor = data.constructor;
            // Constructor *should* always be non-null here,
            // but if it isn't, we'll end up throwing a fairly good error
            // message for the user.
            if (constructor != null) {
              binder
                  .bind(returnKey)
                  .toConstructor(constructor, (TypeLiteral) data.implementationType)
                  .in(Scopes.NO_SCOPE); // make sure we erase any scope on the implementation type
            }
          }
        };
    Injector forCreate = injector.createChildInjector(assistedModule);
    Binding<?> binding = forCreate.getBinding(returnKey);
    // If we have providers cached in data, cache the binding for future optimizations.
    if (data.optimized) {
      data.cachedBinding = binding;
    }
    return binding;
  }
  /**
   * When a factory method is invoked, we create a child injector that binds all parameters, then
   * use that to get an instance of the return type.
   *
   * <p>Dispatch order: (1) javac-generated default methods go through their captured
   * MethodHandle; (2) {@code Object} methods are answered locally so the proxy behaves sanely in
   * collections and debuggers; (3) everything else builds (or reuses) a binding and provisions
   * the product, unwrapping single-cause ProvisionExceptions the method is declared to throw.
   */
  @Override
  public Object invoke(Object proxy, final Method method, final Object[] args) throws Throwable {
    // If we setup a method handle earlier for this method, call it.
    // This is necessary for default methods that java8 creates, so we
    // can call the default method implementation (and not our proxied version of it).
    if (methodHandleByMethod.containsKey(method)) {
      return methodHandleByMethod.get(method).invokeWithArguments(args);
    }
    if (method.getDeclaringClass().equals(Object.class)) {
      if ("equals".equals(method.getName())) {
        // Proxy identity: equal only to itself.
        return proxy == args[0];
      } else if ("hashCode".equals(method.getName())) {
        return System.identityHashCode(proxy);
      } else {
        return method.invoke(this, args);
      }
    }
    AssistData data = assistDataByMethod.get(method);
    checkState(data != null, "No data for method: %s", method);
    Provider<?> provider;
    if (data.cachedBinding != null) { // Try to get optimized form...
      provider = data.cachedBinding.getProvider();
    } else {
      provider = getBindingFromNewInjector(method, args, data).getProvider();
    }
    try {
      // Publish the call's arguments to the ThreadLocal providers before provisioning.
      int p = 0;
      for (ThreadLocalProvider tlp : data.providers) {
        tlp.set(args[p++]);
      }
      return provider.get();
    } catch (ProvisionException e) {
      // if this is an exception declared by the factory method, throw it as-is
      if (e.getErrorMessages().size() == 1) {
        Message onlyError = getOnlyElement(e.getErrorMessages());
        Throwable cause = onlyError.getCause();
        if (cause != null && canRethrow(method, cause)) {
          throw cause;
        }
      }
      throw e;
    } finally {
      // Always clear the ThreadLocals so arguments cannot leak into later calls on this thread.
      for (ThreadLocalProvider tlp : data.providers) {
        tlp.remove();
      }
    }
  }
  /** Returns the name of the factory interface this proxy implements. */
  @Override
  public String toString() {
    return factory.getClass().getInterfaces()[0].getName();
  }
  /** Hash is derived from the same fields as {@link #equals}: the factory key and collector. */
  @Override
  public int hashCode() {
    return Objects.hashCode(factoryKey, collector);
  }
@Override
public boolean equals(Object obj) {
if (!(obj instanceof FactoryProvider2)) {
return false;
}
FactoryProvider2<?> other = (FactoryProvider2<?>) obj;
return factoryKey.equals(other.factoryKey) && Objects.equal(collector, other.collector);
}
/** Returns true if {@code thrown} can be thrown by {@code invoked} without wrapping. */
static boolean canRethrow(Method invoked, Throwable thrown) {
if (thrown instanceof Error || thrown instanceof RuntimeException) {
return true;
}
for (Class<?> declared : invoked.getExceptionTypes()) {
if (declared.isInstance(thrown)) {
return true;
}
}
return false;
}
// not <T> because we'll never know and this is easier than suppressing warnings.
private static | literal |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/ClassOrderer.java | {
"start": 3539,
"end": 3914
} | class ____} to order; never {@code null}
*/
void orderClasses(ClassOrdererContext context);
/**
* {@code ClassOrderer} that allows to explicitly specify that the default
* ordering should be applied.
*
* <p>If the {@value #DEFAULT_ORDER_PROPERTY_NAME} is set, specifying this
* {@code ClassOrderer} has the same effect as referencing the configured
* | descriptors |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/maybe/MaybeRetryTest.java | {
"start": 1006,
"end": 5448
} | class ____ extends RxJavaTest {
@Test
public void retryTimesPredicateWithMatchingPredicate() {
final AtomicInteger atomicInteger = new AtomicInteger(3);
final AtomicInteger numberOfSubscribeCalls = new AtomicInteger(0);
Maybe.fromCallable(new Callable<Boolean>() {
@Override public Boolean call() throws Exception {
numberOfSubscribeCalls.incrementAndGet();
if (atomicInteger.decrementAndGet() != 0) {
throw new RuntimeException();
}
throw new IllegalArgumentException();
}
})
.retry(Integer.MAX_VALUE, new Predicate<Throwable>() {
@Override public boolean test(final Throwable throwable) throws Exception {
return !(throwable instanceof IllegalArgumentException);
}
})
.test()
.assertFailure(IllegalArgumentException.class);
assertEquals(3, numberOfSubscribeCalls.get());
}
@Test
public void retryTimesPredicateWithMatchingRetryAmount() {
final AtomicInteger atomicInteger = new AtomicInteger(3);
final AtomicInteger numberOfSubscribeCalls = new AtomicInteger(0);
Maybe.fromCallable(new Callable<Boolean>() {
@Override public Boolean call() throws Exception {
numberOfSubscribeCalls.incrementAndGet();
if (atomicInteger.decrementAndGet() != 0) {
throw new RuntimeException();
}
return true;
}
})
.retry(2, Functions.alwaysTrue())
.test()
.assertResult(true);
assertEquals(3, numberOfSubscribeCalls.get());
}
@Test
public void retryTimesPredicateWithNotMatchingRetryAmount() {
final AtomicInteger atomicInteger = new AtomicInteger(3);
final AtomicInteger numberOfSubscribeCalls = new AtomicInteger(0);
Maybe.fromCallable(new Callable<Boolean>() {
@Override public Boolean call() throws Exception {
numberOfSubscribeCalls.incrementAndGet();
if (atomicInteger.decrementAndGet() != 0) {
throw new RuntimeException();
}
return true;
}
})
.retry(1, Functions.alwaysTrue())
.test()
.assertFailure(RuntimeException.class);
assertEquals(2, numberOfSubscribeCalls.get());
}
@Test
public void retryTimesPredicateWithZeroRetries() {
final AtomicInteger atomicInteger = new AtomicInteger(2);
final AtomicInteger numberOfSubscribeCalls = new AtomicInteger(0);
Maybe.fromCallable(new Callable<Boolean>() {
@Override public Boolean call() throws Exception {
numberOfSubscribeCalls.incrementAndGet();
if (atomicInteger.decrementAndGet() != 0) {
throw new RuntimeException();
}
return true;
}
})
.retry(0, Functions.alwaysTrue())
.test()
.assertFailure(RuntimeException.class);
assertEquals(1, numberOfSubscribeCalls.get());
}
@Test
public void untilTrueJust() {
Maybe.just(1)
.retryUntil(() -> true)
.test()
.assertResult(1);
}
@Test
public void untilFalseJust() {
Maybe.just(1)
.retryUntil(() -> false)
.test()
.assertResult(1);
}
@Test
public void untilTrueEmpty() {
Maybe.empty()
.retryUntil(() -> true)
.test()
.assertResult();
}
@Test
public void untilFalseEmpty() {
Maybe.empty()
.retryUntil(() -> false)
.test()
.assertResult();
}
@Test
public void untilTrueError() {
Maybe.error(new TestException())
.retryUntil(() -> true)
.test()
.assertFailure(TestException.class);
}
@Test
public void untilFalseError() {
AtomicInteger counter = new AtomicInteger();
Maybe.defer(() -> {
if (counter.getAndIncrement() == 0) {
return Maybe.error(new TestException());
}
return Maybe.just(1);
})
.retryUntil(() -> false)
.test()
.assertResult(1);
}
}
| MaybeRetryTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumeSimpleDynamicDoneFileNameWithTwoDotsTest.java | {
"start": 1364,
"end": 2530
} | class ____ extends ContextTestSupport {
@Test
public void testSimpleDynamicDoneFileNameContainingTwoDots() throws Exception {
NotifyBuilder notify = new NotifyBuilder(context).whenDone(1).create();
getMockEndpoint("mock:result").expectedBodiesReceivedInAnyOrder("input-body");
template.sendBodyAndHeader(fileUri(), "input-body", Exchange.FILE_NAME, "test.twodot.txt");
template.sendBodyAndHeader(fileUri(), "done-body", Exchange.FILE_NAME, "test.twodot.done");
assertMockEndpointsSatisfied();
assertTrue(notify.matchesWaitTime());
assertFalse(Files.exists(testFile("test.twodot.txt")), "Input file should be deleted");
assertFalse(Files.exists(testFile("test.twodot.done")), "Done file should be deleted");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(fileUri("?doneFileName=$simple{file:name.noext}.done&initialDelay=0"))
.to("mock:result");
}
};
}
}
| FileConsumeSimpleDynamicDoneFileNameWithTwoDotsTest |
java | quarkusio__quarkus | integration-tests/oidc/src/test/java/io/quarkus/it/keycloak/TlsRegistryTestProfile.java | {
"start": 90,
"end": 244
} | class ____ implements QuarkusTestProfile {
@Override
public String getConfigProfile() {
return "tls-registry";
}
}
| TlsRegistryTestProfile |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/mapping/SingleTableSubclass.java | {
"start": 641,
"end": 1296
} | class ____ extends Subclass {
public SingleTableSubclass(PersistentClass superclass, MetadataBuildingContext buildingContext) {
super( superclass, buildingContext );
}
protected List<Property> getNonDuplicatedProperties() {
return new JoinedList<>( getSuperclass().getUnjoinedProperties(), getUnjoinedProperties() );
}
public Object accept(PersistentClassVisitor mv) {
return mv.accept( this );
}
public void validate(Metadata mapping) throws MappingException {
if ( getDiscriminator() == null ) {
throw new MappingException( "No discriminator defined by '" + getSuperclass().getEntityName()
+ "' which is a root | SingleTableSubclass |
java | google__dagger | javatests/dagger/internal/codegen/MembersInjectionTest.java | {
"start": 41644,
"end": 42375
} | interface ____ {",
" UsesInaccessibles usesInaccessibles();",
"}");
CompilerTests.daggerCompiler(inaccessible, inaccessiblesModule, usesInaccessibles, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerTestComponent"));
});
}
@Test
public void publicSupertypeHiddenSubtype() throws Exception {
Source foo =
CompilerTests.javaSource(
"other.Foo",
"package other;",
"",
"import javax.inject.Inject;",
"",
" | TestComponent |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/direct/DirectProducerBlockingTest.java | {
"start": 1374,
"end": 4114
} | class ____ extends ContextTestSupport {
@Test
public void testProducerBlocksForSuspendedConsumer() throws Exception {
DirectEndpoint endpoint = getMandatoryEndpoint("direct:suspended", DirectEndpoint.class);
endpoint.getConsumer().suspend();
StopWatch watch = new StopWatch();
CamelExecutionException e = assertThrows(CamelExecutionException.class,
() -> template.sendBody("direct:suspended?block=true&timeout=500", "hello world"),
"Expected CamelExecutionException");
DirectConsumerNotAvailableException cause
= assertIsInstanceOf(DirectConsumerNotAvailableException.class, e.getCause());
assertIsInstanceOf(CamelExchangeException.class, cause);
assertTrue(watch.taken() > 490);
assertTrue(watch.taken() < 5000);
}
@Test
public void testProducerBlocksWithNoConsumers() throws Exception {
DirectEndpoint endpoint = getMandatoryEndpoint("direct:suspended", DirectEndpoint.class);
endpoint.getConsumer().suspend();
StopWatch watch = new StopWatch();
CamelExecutionException e = assertThrows(CamelExecutionException.class,
() -> template.sendBody("direct:start?block=true&timeout=500", "hello world"),
"Expected CamelExecutionException");
DirectConsumerNotAvailableException cause
= assertIsInstanceOf(DirectConsumerNotAvailableException.class, e.getCause());
assertIsInstanceOf(CamelExchangeException.class, cause);
assertTrue(watch.taken() > 490);
assertTrue(watch.taken() < 5000);
}
@Test
public void testProducerBlocksResumeTest() throws Exception {
context.getRouteController().suspendRoute("foo");
ExecutorService executor = Executors.newSingleThreadExecutor();
executor.submit(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(200);
log.info("Resuming consumer");
context.getRouteController().resumeRoute("foo");
} catch (Exception e) {
// ignore
}
}
});
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:suspended?block=true&timeout=1000", "hello world");
assertMockEndpointsSatisfied();
executor.shutdownNow();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:suspended").routeId("foo").to("mock:result");
}
};
}
}
| DirectProducerBlockingTest |
java | apache__camel | components/camel-infinispan/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/remote/cluster/InfinispanRemoteClusterView.java | {
"start": 2141,
"end": 5097
} | class ____ extends InfinispanClusterView {
private static final Logger LOGGER = LoggerFactory.getLogger(InfinispanRemoteClusterView.class);
private final InfinispanRemoteClusterConfiguration configuration;
private final InfinispanRemoteManager manager;
private final LocalMember localMember;
private final LeadershipService leadership;
private RemoteCache<String, String> cache;
protected InfinispanRemoteClusterView(
InfinispanRemoteClusterService cluster,
InfinispanRemoteClusterConfiguration configuration,
String namespace) {
super(cluster, namespace);
this.configuration = configuration;
this.manager = new InfinispanRemoteManager(cluster.getCamelContext(), this.configuration.getConfiguration());
this.leadership = new LeadershipService();
this.localMember = new LocalMember(cluster.getId());
}
@SuppressWarnings("unchecked")
@Override
public void doStart() throws Exception {
super.doStart();
ServiceHelper.startService(manager);
this.cache = manager.getCache(getNamespace(), RemoteCache.class);
ServiceHelper.startService(leadership);
}
@Override
public void doStop() throws Exception {
super.doStop();
LOGGER.info("shutdown service: {}", getClusterService().getId());
ServiceHelper.stopService(leadership);
ServiceHelper.stopService(manager);
this.cache = null;
}
@Override
public CamelClusterMember getLocalMember() {
return this.localMember;
}
@Override
public List<CamelClusterMember> getMembers() {
return this.cache != null
? cache.keySet().stream()
.filter(negate(InfinispanClusterService.LEADER_KEY::equals))
.map(ClusterMember::new)
.collect(Collectors.toList())
: Collections.emptyList();
}
@Override
public Optional<CamelClusterMember> getLeader() {
if (this.cache == null) {
return Optional.empty();
}
String id = cache.get(InfinispanClusterService.LEADER_KEY);
if (id == null) {
return Optional.empty();
}
return Optional.of(new ClusterMember(id));
}
@Override
protected boolean isLeader(String id) {
if (this.cache == null) {
return false;
}
if (id == null) {
return false;
}
final String key = InfinispanClusterService.LEADER_KEY;
final String val = this.cache.get(key);
return Objects.equals(id, val);
}
// *****************************************
//
// Service
//
// *****************************************
@ClientListener
private final | InfinispanRemoteClusterView |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/physical/stream/PushCalcPastChangelogNormalizeRule.java | {
"start": 3546,
"end": 17190
} | class ____
extends RelRule<PushCalcPastChangelogNormalizeRule.Config> {
public static final RelOptRule INSTANCE =
new PushCalcPastChangelogNormalizeRule(Config.DEFAULT);
public PushCalcPastChangelogNormalizeRule(Config config) {
super(config);
}
@Override
public boolean matches(RelOptRuleCall call) {
final StreamPhysicalChangelogNormalize changelogNormalize = call.rel(1);
return (!changelogNormalize.sourceReused() || changelogNormalize.commonFilter().length > 0)
&& super.matches(call);
}
    /**
     * Rewrites the matched Calc + ChangelogNormalize pair: pushes a projection of the used fields
     * and all primary-key-only predicates below the ChangelogNormalize, then re-attaches the
     * predicates that could not be pushed in a Calc above it.
     */
    @Override
    public void onMatch(RelOptRuleCall call) {
        final StreamPhysicalCalc calc = call.rel(0);
        final StreamPhysicalChangelogNormalize changelogNormalize = call.rel(1);
        // Primary key columns as a set of input field indices.
        final Set<Integer> primaryKeyIndices =
                IntStream.of(changelogNormalize.uniqueKeys()).boxed().collect(Collectors.toSet());
        // Determine which filters can be pushed (= involve only primary key columns)
        final List<RexNode> primaryKeyPredicates = new ArrayList<>();
        final List<RexNode> otherPredicates = new ArrayList<>();
        final RexBuilder rexBuilder = call.builder().getRexBuilder();
        final List<RexNode> conditions =
                getCommonConditions(
                        rexBuilder, changelogNormalize.commonFilter(), calc.getProgram());
        if (!conditions.isEmpty()) {
            partitionPrimaryKeyPredicates(
                    conditions, primaryKeyIndices, primaryKeyPredicates, otherPredicates);
        }
        // A filter already attached to the ChangelogNormalize stays with it.
        if (changelogNormalize.filterCondition() != null) {
            otherPredicates.add(changelogNormalize.filterCondition());
        }
        // used input field indices
        int[] usedInputFields = extractUsedInputFields(calc, changelogNormalize, primaryKeyIndices);
        final RexExecutor rexExecutor = calc.getCluster().getPlanner().getExecutor();
        // Construct a new ChangelogNormalize which has used fields project
        // and primary key filters pushed into it
        final StreamPhysicalChangelogNormalize newChangelogNormalize =
                pushCalcThroughChangelogNormalize(
                        call, primaryKeyPredicates, otherPredicates, usedInputFields, rexExecutor);
        final List<RexNode> nonCommonConditions =
                getNonCommonConditions(
                        rexBuilder, changelogNormalize.commonFilter(), calc.getProgram());
        // Retain only filters which haven't been pushed
        transformWithRemainingPredicates(
                call, newChangelogNormalize, usedInputFields, nonCommonConditions, rexExecutor);
    }
private List<RexNode> getCommonConditions(
RexBuilder rexBuilder, RexNode[] commonFilter, RexProgram rexProgram) {
if (commonFilter.length > 0) {
return List.of(commonFilter);
}
return FlinkRexUtil.extractConjunctiveConditions(rexBuilder, rexProgram);
}
private List<RexNode> getNonCommonConditions(
RexBuilder rexBuilder, RexNode[] commonFilter, RexProgram rexProgram) {
if (commonFilter.length > 0) {
List<RexNode> conditionsFromProgram =
FlinkRexUtil.extractConjunctiveConditions(rexBuilder, rexProgram);
conditionsFromProgram.removeAll(List.of(commonFilter));
return conditionsFromProgram;
}
return List.of();
}
/** Extracts input fields which are used in the Calc node and the ChangelogNormalize node. */
private int[] extractUsedInputFields(
StreamPhysicalCalc calc,
StreamPhysicalChangelogNormalize changelogNormalize,
Set<Integer> primaryKeyIndices) {
RexProgram program = calc.getProgram();
List<RexNode> projectsAndCondition =
program.getProjectList().stream()
.map(program::expandLocalRef)
.collect(Collectors.toList());
if (program.getCondition() != null) {
projectsAndCondition.add(program.expandLocalRef(program.getCondition()));
}
if (changelogNormalize.filterCondition() != null) {
projectsAndCondition.add(changelogNormalize.filterCondition());
}
Set<Integer> projectedFields =
Arrays.stream(extractRefInputFields(projectsAndCondition))
.boxed()
.collect(Collectors.toSet());
// we can't project primary keys
projectedFields.addAll(primaryKeyIndices);
return projectedFields.stream().sorted().mapToInt(Integer::intValue).toArray();
}
/**
* Separates the given {@param predicates} into filters which affect only the primary key and
* anything else.
*/
private void partitionPrimaryKeyPredicates(
List<RexNode> predicates,
Set<Integer> primaryKeyIndices,
List<RexNode> primaryKeyPredicates,
List<RexNode> remainingPredicates) {
for (RexNode predicate : predicates) {
int[] inputRefs = extractRefInputFields(Collections.singletonList(predicate));
if (Arrays.stream(inputRefs).allMatch(primaryKeyIndices::contains)) {
primaryKeyPredicates.add(predicate);
} else {
remainingPredicates.add(predicate);
}
}
}
    /**
     * Pushes {@code primaryKeyPredicates} and a projection of the used fields into the {@link
     * StreamPhysicalChangelogNormalize}.
     *
     * @param primaryKeyPredicates filters touching only primary key columns, applied below the
     *     exchange
     * @param otherPredicates remaining filters, attached to the new ChangelogNormalize as its
     *     filter condition
     * @param usedInputFields input field indices that must survive the projection
     * @return the rewritten ChangelogNormalize, or the original one when nothing can be pushed
     */
    private StreamPhysicalChangelogNormalize pushCalcThroughChangelogNormalize(
            RelOptRuleCall call,
            List<RexNode> primaryKeyPredicates,
            List<RexNode> otherPredicates,
            int[] usedInputFields,
            RexExecutor rexExecutor) {
        final StreamPhysicalChangelogNormalize changelogNormalize = call.rel(1);
        final StreamPhysicalExchange exchange = call.rel(2);
        final Set<Integer> primaryKeyIndices =
                IntStream.of(changelogNormalize.uniqueKeys()).boxed().collect(Collectors.toSet());
        if (primaryKeyPredicates.isEmpty()
                && usedInputFields.length == changelogNormalize.getRowType().getFieldCount()) {
            // Nothing to push below the exchange (no key-only filter, all fields used).
            // Check if there is a condition that can be pushed into the ChangelogNormalize
            if (otherPredicates.isEmpty()) {
                return changelogNormalize;
            } else {
                final RexNode condition =
                        FlinkRexUtil.simplify(
                                call.builder().getRexBuilder(),
                                call.builder().and(otherPredicates),
                                rexExecutor);
                return (StreamPhysicalChangelogNormalize)
                        changelogNormalize.copy(
                                changelogNormalize.getTraitSet(),
                                exchange,
                                changelogNormalize.uniqueKeys(),
                                condition.isAlwaysTrue() ? null : condition);
            }
        }
        // Project the used fields and apply the key-only filters below the exchange.
        final StreamPhysicalCalc pushedCalc =
                projectUsedFieldsWithConditions(
                        call.builder(),
                        exchange.getInput(),
                        primaryKeyPredicates,
                        usedInputFields,
                        rexExecutor);
        // build input field reference from old field index to new field index
        final Map<Integer, Integer> inputRefMapping = buildFieldsMapping(usedInputFields);
        final List<Integer> newPrimaryKeyIndices =
                primaryKeyIndices.stream().map(inputRefMapping::get).collect(Collectors.toList());
        // add conditions, shifted to the post-projection field indices
        final List<RexNode> shiftedPredicates =
                otherPredicates.stream()
                        .map(p -> adjustInputRef(p, inputRefMapping))
                        .collect(Collectors.toList());
        final RexNode condition = call.builder().and(shiftedPredicates);
        // Re-hash on the remapped primary key so the exchange matches the new field layout.
        final FlinkRelDistribution newDistribution =
                FlinkRelDistribution.hash(newPrimaryKeyIndices, true);
        final RelTraitSet newTraitSet = exchange.getTraitSet().replace(newDistribution);
        final StreamPhysicalExchange newExchange =
                exchange.copy(newTraitSet, pushedCalc, newDistribution);
        return (StreamPhysicalChangelogNormalize)
                changelogNormalize.copy(
                        changelogNormalize.getTraitSet(),
                        newExchange,
                        newPrimaryKeyIndices.stream().mapToInt(Integer::intValue).toArray(),
                        condition.isAlwaysTrue() ? null : condition);
    }
/**
* Builds a new {@link StreamPhysicalCalc} on the input node with the given {@param conditions}
* and a used fields projection.
*/
private StreamPhysicalCalc projectUsedFieldsWithConditions(
RelBuilder relBuilder,
RelNode input,
List<RexNode> conditions,
int[] usedFields,
RexExecutor rexExecutor) {
final RelDataType inputRowType = input.getRowType();
final List<String> inputFieldNames = inputRowType.getFieldNames();
final RexProgramBuilder programBuilder =
new RexProgramBuilder(inputRowType, relBuilder.getRexBuilder());
// add project
for (int fieldIndex : usedFields) {
programBuilder.addProject(
programBuilder.makeInputRef(fieldIndex), inputFieldNames.get(fieldIndex));
}
// add conditions
final RexNode condition = relBuilder.and(conditions);
if (!condition.isAlwaysTrue()) {
programBuilder.addCondition(
FlinkRexUtil.simplify(relBuilder.getRexBuilder(), condition, rexExecutor));
}
final RexProgram newProgram = programBuilder.getProgram();
return new StreamPhysicalCalc(
input.getCluster(),
input.getTraitSet(),
input,
newProgram,
newProgram.getOutputRowType());
}
    /**
     * Transforms the {@link RelOptRuleCall} to use {@code changelogNormalize} as the new input to
     * a {@link StreamPhysicalCalc} which keeps the original projection and uses {@code conditions}
     * for the condition.
     *
     * @param usedInputFields field indices kept by the pushed projection, used to shift the input
     *     references of the original Calc program
     * @param conditions predicates that were not pushed below the ChangelogNormalize
     */
    private void transformWithRemainingPredicates(
            RelOptRuleCall call,
            StreamPhysicalChangelogNormalize changelogNormalize,
            int[] usedInputFields,
            List<RexNode> conditions,
            RexExecutor rexExecutor) {
        final StreamPhysicalCalc calc = call.rel(0);
        final RelBuilder relBuilder = call.builder();
        final RexProgramBuilder programBuilder =
                new RexProgramBuilder(changelogNormalize.getRowType(), relBuilder.getRexBuilder());
        final Map<Integer, Integer> inputRefMapping = buildFieldsMapping(usedInputFields);
        // add projects, shifting their input refs to the post-projection indices
        for (Pair<RexLocalRef, String> ref : calc.getProgram().getNamedProjects()) {
            RexNode shiftedProject =
                    adjustInputRef(calc.getProgram().expandLocalRef(ref.left), inputRefMapping);
            programBuilder.addProject(shiftedProject, ref.right);
        }
        if (!conditions.isEmpty()) {
            final RexNode condition = relBuilder.and(conditions);
            final RexNode simplifiedCondition =
                    FlinkRexUtil.simplify(relBuilder.getRexBuilder(), condition, rexExecutor);
            if (!simplifiedCondition.isAlwaysTrue()) {
                programBuilder.addCondition(adjustInputRef(simplifiedCondition, inputRefMapping));
            }
        }
        final RexProgram newProgram = programBuilder.getProgram();
        if (newProgram.isTrivial()) {
            // The Calc would be an identity mapping; use the ChangelogNormalize directly.
            call.transformTo(changelogNormalize);
        } else {
            // NOTE(review): clears the common filter before re-attaching a Calc — presumably to
            // mark it as consumed by this rewrite (see matches()); confirm intended semantics.
            changelogNormalize.setCommonFilter(new RexNode[0]);
            final StreamPhysicalCalc newProjectedCalc =
                    new StreamPhysicalCalc(
                            changelogNormalize.getCluster(),
                            changelogNormalize.getTraitSet(),
                            changelogNormalize,
                            newProgram,
                            newProgram.getOutputRowType());
            call.transformTo(newProjectedCalc);
        }
    }
/** Adjust the {@param expr} field indices according to the field index {@param mapping}. */
private RexNode adjustInputRef(RexNode expr, Map<Integer, Integer> mapping) {
return expr.accept(
new RexShuttle() {
@Override
public RexNode visitInputRef(RexInputRef inputRef) {
Integer newIndex = mapping.get(inputRef.getIndex());
return new RexInputRef(newIndex, inputRef.getType());
}
});
}
/** Build field reference mapping from old field index to new field index after projection. */
private Map<Integer, Integer> buildFieldsMapping(int[] projectedInputRefs) {
final Map<Integer, Integer> fieldsOldToNewIndexMapping = new HashMap<>();
for (int i = 0; i < projectedInputRefs.length; i++) {
fieldsOldToNewIndexMapping.put(projectedInputRefs[i], i);
}
return fieldsOldToNewIndexMapping;
}
// ---------------------------------------------------------------------------------------------
/** Configuration for {@link PushCalcPastChangelogNormalizeRule}. */
@Value.Immutable(singleton = false)
public | PushCalcPastChangelogNormalizeRule |
java | apache__camel | components/camel-telegram/src/test/java/org/apache/camel/component/telegram/TelegramConsumerHealthCheckErrorTest.java | {
"start": 1509,
"end": 4744
} | class ____ extends TelegramTestSupport {
@EndpointInject("mock:telegram")
private MockEndpoint endpoint;
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
// enabling consumers health check is a bit cumbersome via low-level Java code
HealthCheckRegistry hcr = context.getCamelContextExtension().getContextPlugin(HealthCheckRegistry.class);
HealthCheckRepository repo
= hcr.getRepository("consumers").orElse((HealthCheckRepository) hcr.resolveById("consumers"));
repo.setEnabled(true);
hcr.register(repo);
return context;
}
@Test
public void testReceptionOfTwoMessages() {
HealthCheckRegistry hcr = context.getCamelContextExtension().getContextPlugin(HealthCheckRegistry.class);
HealthCheckRepository repo = hcr.getRepository("consumers").get();
// wait until HC is DOWN
Awaitility.waitAtMost(5, TimeUnit.SECONDS).until(
() -> repo.stream().anyMatch(h -> h.call().getState().equals(HealthCheck.State.DOWN)));
// if we grab the health check by id, we can also check it afterwards
HealthCheck hc = hcr.getCheck("consumer:telegram").get();
// wait until we have the error
Awaitility.waitAtMost(5, TimeUnit.SECONDS).until(
() -> {
HealthCheck.Result rc = hc.call();
Long count = (Long) rc.getDetails().get(HealthCheck.FAILURE_ERROR_COUNT);
return count != null && count > 0;
});
HealthCheck.Result rc = hc.call();
// and get the detailed error message (and exception)
Assertions.assertEquals(HealthCheck.State.DOWN, rc.getState());
String msg = rc.getMessage().get();
long count = (long) rc.getDetails().get(HealthCheck.FAILURE_ERROR_COUNT);
Assertions.assertEquals("Consumer failed polling " + count + " times route: telegram (telegram://bots)", msg);
// test that the uri is masked
Assertions.assertEquals("telegram://bots?authorizationToken=xxxxxx",
rc.getDetails().get(HealthCheck.ENDPOINT_URI));
Throwable e = rc.getError().get();
Assertions.assertTrue(e.getMessage().contains("401"));
Assertions.assertEquals(401, rc.getDetails().get(HealthCheck.HTTP_RESPONSE_CODE));
}
@Override
protected RoutesBuilder[] createRouteBuilders() {
return new RoutesBuilder[] {
getMockRoutes(),
new RouteBuilder() {
@Override
public void configure() {
from("telegram:bots?authorizationToken=mock-token").routeId("telegram")
.convertBodyTo(String.class)
.to("mock:telegram");
}
} };
}
@Override
protected TelegramMockRoutes createMockRoutes() {
return new TelegramMockRoutes(port)
.addErrorEndpoint(
"getUpdates",
"GET",
401);
}
}
| TelegramConsumerHealthCheckErrorTest |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledForJreRange.java | {
"start": 5983,
"end": 6738
} | enum ____ exists for the particular JRE version,
* you can specify the maximum version via {@link #max() max} instead.
*
* <p>Defaults to {@code -1} to signal that {@link #max() max} should be used
* instead.
*
* @since 5.12
* @see #max()
* @see JRE#version()
* @see Runtime.Version#feature()
*/
@API(status = MAINTAINED, since = "5.13.3")
int maxVersion() default -1;
/**
* Custom reason to provide if the test or container is disabled.
*
* <p>If a custom reason is supplied, it will be combined with the default
* reason for this annotation. If a custom reason is not supplied, the default
* reason will be used.
*
* @since 5.7
*/
@API(status = STABLE, since = "5.7")
String disabledReason() default "";
}
| constant |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/redshift/parser/RedshiftSelectParser.java | {
"start": 712,
"end": 3142
} | class ____
extends PGSelectParser {
public RedshiftSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
super(exprParser, selectListCache);
dbType = DbType.redshift;
}
protected RedshiftExprParser createExprParser() {
return new RedshiftExprParser(lexer);
}
public SQLSelectQuery query(SQLObject parent, boolean acceptUnion) {
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
SQLSelectQuery select = query();
accept(Token.RPAREN);
return queryRest(select, acceptUnion);
}
RedshiftSelectQueryBlock queryBlock = new RedshiftSelectQueryBlock();
if (lexer.token() == Token.SELECT) {
lexer.nextToken();
if (lexer.token() == Token.TOP) {
SQLTop top = this.createExprParser().parseTop();
queryBlock.setTop(top);
}
if (lexer.token() == Token.DISTINCT) {
queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT);
lexer.nextToken();
} else if (lexer.token() == Token.ALL) {
queryBlock.setDistionOption(SQLSetQuantifier.ALL);
lexer.nextToken();
}
parseSelectList(queryBlock);
}
parseInto(queryBlock);
parseFrom(queryBlock);
//TODO(lingo): Support oracle-style outer join, see https://docs.aws.amazon.com/redshift/latest/dg/r_WHERE_oracle_outer.html
parseWhere(queryBlock);
parseHierachical(queryBlock);
parseGroupBy(queryBlock);
qualify(queryBlock);
parseSortBy(queryBlock);
parseFetchClause(queryBlock);
return queryRest(queryBlock, acceptUnion);
}
protected void parseInto(RedshiftSelectQueryBlock queryBlock) {
if (lexer.token() == Token.INTO) {
lexer.nextToken();
if (lexer.nextIfIdentifier("TEMP")) {
queryBlock.setInsertTemp(true);
}
if (lexer.nextIfIdentifier("TEMPORARY")) {
queryBlock.setInsertTemporary(true);
}
if (lexer.nextIf(Token.TABLE)) {
queryBlock.setInsertTable(true);
}
SQLTableSource into = this.parseTableSource();
queryBlock.setInto((SQLExprTableSource) into);
}
}
}
| RedshiftSelectParser |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStatsTests.java | {
"start": 764,
"end": 6442
} | class ____ extends AbstractXContentSerializingTestCase<ModelSizeStats> {
public void testDefaultConstructor() {
ModelSizeStats stats = new ModelSizeStats.Builder("foo").build();
assertEquals(0, stats.getModelBytes());
assertNull(stats.getPeakModelBytes());
assertNull(stats.getModelBytesExceeded());
assertNull(stats.getModelBytesMemoryLimit());
assertEquals(0, stats.getTotalByFieldCount());
assertEquals(0, stats.getTotalOverFieldCount());
assertEquals(0, stats.getTotalPartitionFieldCount());
assertEquals(0, stats.getBucketAllocationFailuresCount());
assertEquals(MemoryStatus.OK, stats.getMemoryStatus());
assertNull(stats.getAssignmentMemoryBasis());
assertNull(stats.getOutputMemmoryAllocatorBytes());
assertEquals(0, stats.getCategorizedDocCount());
assertEquals(0, stats.getTotalCategoryCount());
assertEquals(0, stats.getFrequentCategoryCount());
assertEquals(0, stats.getRareCategoryCount());
assertEquals(0, stats.getDeadCategoryCount());
assertEquals(0, stats.getFailedCategoryCount());
assertEquals(CategorizationStatus.OK, stats.getCategorizationStatus());
}
public void testSetMemoryStatus_GivenNull() {
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
NullPointerException ex = expectThrows(NullPointerException.class, () -> stats.setMemoryStatus(null));
assertEquals("[memory_status] must not be null", ex.getMessage());
}
public void testSetMemoryStatus_GivenSoftLimit() {
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
stats.setMemoryStatus(MemoryStatus.SOFT_LIMIT);
assertEquals(MemoryStatus.SOFT_LIMIT, stats.build().getMemoryStatus());
}
@Override
protected ModelSizeStats createTestInstance() {
return createRandomized();
}
@Override
protected ModelSizeStats mutateInstance(ModelSizeStats instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
public static ModelSizeStats createRandomized() {
ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
if (randomBoolean()) {
stats.setBucketAllocationFailuresCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setModelBytes(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setPeakModelBytes(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setModelBytesExceeded(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setModelBytesMemoryLimit(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalByFieldCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalOverFieldCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalPartitionFieldCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setLogTime(new Date(randomTimeValue().millis()));
}
if (randomBoolean()) {
stats.setTimestamp(new Date(randomTimeValue().millis()));
}
if (randomBoolean()) {
stats.setMemoryStatus(randomFrom(MemoryStatus.values()));
}
if (randomBoolean()) {
stats.setAssignmentMemoryBasis(randomFrom(ModelSizeStats.AssignmentMemoryBasis.values()));
}
if (randomBoolean()) {
stats.setCategorizedDocCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setTotalCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setFrequentCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setRareCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setDeadCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setFailedCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setCategorizationStatus(randomFrom(CategorizationStatus.values()));
}
return stats.build();
}
@Override
protected Reader<ModelSizeStats> instanceReader() {
return ModelSizeStats::new;
}
@Override
protected ModelSizeStats doParseInstance(XContentParser parser) {
return ModelSizeStats.STRICT_PARSER.apply(parser, null).build();
}
public void testId() {
ModelSizeStats stats = new ModelSizeStats.Builder("job-foo").setLogTime(new Date(100)).build();
assertEquals("job-foo_model_size_stats_100", stats.getId());
}
public void testStrictParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> ModelSizeStats.STRICT_PARSER.apply(parser, null)
);
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"job_id\":\"job_1\", \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
ModelSizeStats.LENIENT_PARSER.apply(parser, null);
}
}
}
| ModelSizeStatsTests |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ResponseHeadersCollector.java | {
"start": 708,
"end": 2214
} | class ____ {
private final ThreadContext threadContext;
private final Queue<Map<String, List<String>>> collected = ConcurrentCollections.newQueue();
public ResponseHeadersCollector(ThreadContext threadContext) {
this.threadContext = threadContext;
}
/**
* Called when a child request is completed to collect the response headers of the responding thread
*/
public void collect() {
Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders();
if (responseHeaders.isEmpty() == false) {
collected.add(responseHeaders);
}
}
/**
* Called when all child requests are completed. This will merge all collected response headers
* from the child requests and restore to the current thread.
*/
public void finish() {
final Map<String, Set<String>> merged = new HashMap<>();
Map<String, List<String>> resp;
while ((resp = collected.poll()) != null) {
for (Map.Entry<String, List<String>> e : resp.entrySet()) {
// Use LinkedHashSet to retain the order of the values
merged.computeIfAbsent(e.getKey(), k -> new LinkedHashSet<>(e.getValue().size())).addAll(e.getValue());
}
}
for (Map.Entry<String, Set<String>> e : merged.entrySet()) {
for (String v : e.getValue()) {
threadContext.addResponseHeader(e.getKey(), v);
}
}
}
}
| ResponseHeadersCollector |
java | jhy__jsoup | src/test/java/org/jsoup/select/SelectorTest.java | {
"start": 35310,
"end": 35466
} | class ____ space", found.get(0).text());
found = doc.select("div[class=\"value \"]");
assertEquals(1, found.size());
assertEquals(" | with |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/completion/FuzzyOptions.java | {
"start": 1220,
"end": 7309
} | class ____ implements ToXContentFragment, Writeable {
static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy");
private static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions");
private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length");
private static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
private static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware");
private static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states");
/**
* fuzzy : {
* "edit_distance" : STRING | INT
* "transpositions" : BOOLEAN
* "min_length" : INT
* "prefix_length" : INT
* "unicode_aware" : BOOLEAN
* "max_determinized_states" : INT
* }
*/
private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new);
static {
PARSER.declareInt(Builder::setFuzzyMinLength, MIN_LENGTH_FIELD);
PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES_FIELD);
PARSER.declareBoolean(Builder::setUnicodeAware, UNICODE_AWARE_FIELD);
PARSER.declareInt(Builder::setFuzzyPrefixLength, PREFIX_LENGTH_FIELD);
PARSER.declareBoolean(Builder::setTranspositions, TRANSPOSITION_FIELD);
PARSER.declareField(Builder::setFuzziness, Fuzziness::parse, Fuzziness.FIELD, ObjectParser.ValueType.VALUE);
}
static FuzzyOptions parse(XContentParser parser) throws IOException {
return PARSER.parse(parser, null).build();
}
public static Builder builder() {
return new Builder();
}
private final int editDistance;
private final boolean transpositions;
private final int fuzzyMinLength;
private final int fuzzyPrefixLength;
private final boolean unicodeAware;
private final int maxDeterminizedStates;
private FuzzyOptions(
int editDistance,
boolean transpositions,
int fuzzyMinLength,
int fuzzyPrefixLength,
boolean unicodeAware,
int maxDeterminizedStates
) {
this.editDistance = editDistance;
this.transpositions = transpositions;
this.fuzzyMinLength = fuzzyMinLength;
this.fuzzyPrefixLength = fuzzyPrefixLength;
this.unicodeAware = unicodeAware;
this.maxDeterminizedStates = maxDeterminizedStates;
}
/**
* Read from a stream.
*/
FuzzyOptions(StreamInput in) throws IOException {
transpositions = in.readBoolean();
unicodeAware = in.readBoolean();
editDistance = in.readVInt();
fuzzyMinLength = in.readVInt();
fuzzyPrefixLength = in.readVInt();
maxDeterminizedStates = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(transpositions);
out.writeBoolean(unicodeAware);
out.writeVInt(editDistance);
out.writeVInt(fuzzyMinLength);
out.writeVInt(fuzzyPrefixLength);
out.writeVInt(maxDeterminizedStates);
}
/**
* Returns the maximum number of edits
*/
public int getEditDistance() {
return editDistance;
}
/**
* Returns if transpositions option is set
*
* if transpositions is set, then swapping one character for another counts as one edit instead of two.
*/
public boolean isTranspositions() {
return transpositions;
}
/**
* Returns the length of input prefix after which edits are applied
*/
public int getFuzzyMinLength() {
return fuzzyMinLength;
}
/**
* Returns the minimum length of the input prefix required to apply any edits
*/
public int getFuzzyPrefixLength() {
return fuzzyPrefixLength;
}
/**
* Returns if all measurements (like edit distance, transpositions and lengths) are in unicode code
* points (actual letters) instead of bytes.
*/
public boolean isUnicodeAware() {
return unicodeAware;
}
/**
* Returns the maximum automaton states allowed for fuzzy expansion
*/
public int getMaxDeterminizedStates() {
return maxDeterminizedStates;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FuzzyOptions that = (FuzzyOptions) o;
if (editDistance != that.editDistance) return false;
if (transpositions != that.transpositions) return false;
if (fuzzyMinLength != that.fuzzyMinLength) return false;
if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false;
if (unicodeAware != that.unicodeAware) return false;
return maxDeterminizedStates == that.maxDeterminizedStates;
}
@Override
public int hashCode() {
int result = editDistance;
result = 31 * result + (transpositions ? 1 : 0);
result = 31 * result + fuzzyMinLength;
result = 31 * result + fuzzyPrefixLength;
result = 31 * result + (unicodeAware ? 1 : 0);
result = 31 * result + maxDeterminizedStates;
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(FUZZY_OPTIONS.getPreferredName());
builder.field(Fuzziness.FIELD.getPreferredName(), editDistance);
builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions);
builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength);
builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength);
builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware);
builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
/**
* Options for fuzzy queries
*/
public static | FuzzyOptions |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java | {
"start": 718,
"end": 2192
} | class ____ extends AbstractNamedExpressionSerializationTests<Alias> {
public static Alias randomAlias() {
Source source = SourceTests.randomSource();
String name = randomAlphaOfLength(5);
// TODO better randomChild
Expression child = ReferenceAttributeTests.randomReferenceAttribute(false);
boolean synthetic = randomBoolean();
return new Alias(source, name, child, new NameId(), synthetic);
}
@Override
protected Alias createTestInstance() {
return randomAlias();
}
@Override
protected Alias mutateInstance(Alias instance) throws IOException {
Source source = instance.source();
String name = instance.name();
Expression child = instance.child();
boolean synthetic = instance.synthetic();
switch (between(0, 2)) {
case 0 -> name = randomAlphaOfLength(name.length() + 1);
case 1 -> child = randomValueOtherThan(child, () -> ReferenceAttributeTests.randomReferenceAttribute(false));
case 2 -> synthetic = false == synthetic;
}
return new Alias(source, name, child, instance.id(), synthetic);
}
@Override
protected boolean alwaysEmptySource() {
return true;
}
@Override
protected Alias mutateNameId(Alias instance) {
return instance.withId(new NameId());
}
@Override
protected boolean equalityIgnoresId() {
return false;
}
}
| AliasTests |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/main/java/org/jboss/resteasy/reactive/server/vertx/VertxResteasyReactiveRequestContext.java | {
"start": 17639,
"end": 18962
} | class ____ extends VertxJavaIoContext {
public ResteasyVertxJavaIoContext(RoutingContext context, int minChunkSize, int outputBufferSize) {
super(context, minChunkSize, outputBufferSize);
}
@Override
public Optional<String> getContentLength() {
if (getRoutingContext().request().response().headers().contains(HttpHeaderNames.CONTENT_LENGTH)) {
return Optional.empty();
}
final LazyResponse lazyResponse = VertxResteasyReactiveRequestContext.this.getResponse();
if (!lazyResponse.isCreated()) {
return Optional.empty();
}
MultivaluedMap<String, Object> responseHeaders = lazyResponse.get().getHeaders();
if (responseHeaders != null) {
// we need to make sure the content-length header is copied to Vert.x headers
// otherwise we could run into a race condition: see https://github.com/quarkusio/quarkus/issues/26599
Object contentLength = responseHeaders.getFirst(HttpHeaders.CONTENT_LENGTH);
if (contentLength != null) {
return Optional.of(contentLength.toString());
}
}
return Optional.empty();
}
}
}
| ResteasyVertxJavaIoContext |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/TelemetryMetrics/SearchTookTimeTelemetryTests.java | {
"start": 2823,
"end": 41468
} | class ____ extends ESSingleNodeTestCase {
private static final String indexName = "test_search_metrics2";
private static final String indexNameNanoPrecision = "nano_search_metrics2";
private static final String singleShardIndexName = "single_shard_test_search_metric";
private static final LocalDateTime NOW = LocalDateTime.now(ZoneOffset.UTC);
private static final DateTimeFormatter FORMATTER_MILLIS = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT);
private static final DateTimeFormatter FORMATTER_NANOS = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.nnnnnnnnn", Locale.ROOT);
@Before
public void setUpIndex() {
var num_primaries = randomIntBetween(2, 4);
createIndex(
indexName,
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, num_primaries)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.build()
);
ensureGreen(indexName);
prepareIndex(indexName).setId("1")
.setSource("body", "foo", "@timestamp", "2024-11-01", "event.ingested", "2024-11-01")
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(indexName).setId("2")
.setSource("body", "foo", "@timestamp", "2024-12-01", "event.ingested", "2024-12-01")
.setRefreshPolicy(IMMEDIATE)
.get();
// we use a single shard index to test the case where query and fetch execute in the same round-trip
createIndex(
singleShardIndexName,
Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build()
);
ensureGreen(singleShardIndexName);
prepareIndex(singleShardIndexName).setId("1")
.setSource("body", "foo", "@timestamp", NOW.minusMinutes(5).withSecond(randomIntBetween(0, 59)).format(FORMATTER_MILLIS))
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(singleShardIndexName).setId("2")
.setSource("body", "foo", "@timestamp", NOW.minusMinutes(30).withSecond(randomIntBetween(0, 59)).format(FORMATTER_MILLIS))
.setRefreshPolicy(IMMEDIATE)
.get();
createIndex(
indexNameNanoPrecision,
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, num_primaries)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.build(),
"_doc",
"@timestamp",
"type=date_nanos"
);
ensureGreen(indexNameNanoPrecision);
prepareIndex(indexNameNanoPrecision).setId("10")
.setSource(
"body",
"foo",
"@timestamp",
NOW.minusMinutes(2).withNano(randomIntBetween(0, 1_000_000_000)).format(FORMATTER_NANOS)
)
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(indexNameNanoPrecision).setId("11")
.setSource(
"body",
"foo",
"@timestamp",
NOW.minusMinutes(3).withNano(randomIntBetween(0, 1_000_000_000)).format(FORMATTER_NANOS)
)
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(indexNameNanoPrecision).setId("12")
.setSource(
"body",
"foo",
"@timestamp",
NOW.minusMinutes(4).withNano(randomIntBetween(0, 1_000_000_000)).format(FORMATTER_NANOS)
)
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(indexNameNanoPrecision).setId("13")
.setSource(
"body",
"foo",
"@timestamp",
NOW.minusMinutes(5).withNano(randomIntBetween(0, 1_000_000_000)).format(FORMATTER_NANOS)
)
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(indexNameNanoPrecision).setId("14")
.setSource(
"body",
"foo",
"@timestamp",
NOW.minusMinutes(6).withNano(randomIntBetween(0, 1_000_000_000)).format(FORMATTER_NANOS)
)
.setRefreshPolicy(IMMEDIATE)
.get();
prepareIndex(indexNameNanoPrecision).setId("15")
.setSource(
"body",
"foo",
"@timestamp",
NOW.minusMinutes(75).withNano(randomIntBetween(0, 1_000_000_000)).format(FORMATTER_NANOS)
)
.setRefreshPolicy(IMMEDIATE)
.get();
}
    // Clear all telemetry measurements after each test so recorded counts do not leak between tests.
    @After
    public void afterTest() {
        resetMeter();
    }
    // Install the TestTelemetryPlugin on the test node so histogram measurements can be inspected.
    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(TestTelemetryPlugin.class);
    }
    /**
     * Verifies how the "target" telemetry attribute categorizes searches against dotted index names:
     * a single dotted index resolves to its own category ({@code .others} or a known name like {@code .kibana}),
     * while multiple dotted indices (or {@code _all}) fall back to the generic {@code user} category.
     * NOTE: the meter is not reset between sub-blocks, so each block asserts a cumulative measurement
     * count and inspects the newest measurement via {@code getLast()}.
     */
    public void testOthersDottedIndexName() {
        createIndex(".whatever");
        createIndex(".kibana");
        // Single unknown dotted index -> categorized as ".others".
        {
            SearchResponse searchResponse = client().prepareSearch(".whatever").setQuery(simpleQueryStringQuery("foo")).get();
            try {
                assertNoFailures(searchResponse);
                assertSearchHits(searchResponse);
            } finally {
                searchResponse.decRef();
            }
            List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
            assertEquals(1, measurements.size());
            Measurement measurement = measurements.getFirst();
            assertEquals(searchResponse.getTook().millis(), measurement.getLong());
            Map<String, Object> attributes = measurement.attributes();
            assertEquals(3, attributes.size());
            assertEquals(".others", attributes.get("target"));
            assertEquals("hits_only", attributes.get("query_type"));
            assertEquals("_score", attributes.get("sort"));
        }
        // A wildcard expression resolving to the known ".kibana" index keeps its own category.
        {
            SearchResponse searchResponse = client().prepareSearch(".kibana*").setQuery(simpleQueryStringQuery("foo")).get();
            try {
                assertNoFailures(searchResponse);
                assertSearchHits(searchResponse);
            } finally {
                searchResponse.decRef();
            }
            List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
            assertEquals(2, measurements.size());
            Measurement measurement = measurements.getLast();
            assertEquals(searchResponse.getTook().millis(), measurement.getLong());
            Map<String, Object> attributes = measurement.attributes();
            assertEquals(3, attributes.size());
            assertEquals(".kibana", attributes.get("target"));
            assertEquals("hits_only", attributes.get("query_type"));
            assertEquals("_score", attributes.get("sort"));
        }
        // Wildcard matching more than one dotted index -> generic "user" category.
        {
            SearchResponse searchResponse = client().prepareSearch(".*").setQuery(simpleQueryStringQuery("foo")).get();
            try {
                assertNoFailures(searchResponse);
                assertSearchHits(searchResponse);
            } finally {
                searchResponse.decRef();
            }
            List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
            assertEquals(3, measurements.size());
            Measurement measurement = measurements.getLast();
            assertEquals(searchResponse.getTook().millis(), measurement.getLong());
            // two dotted indices: categorized as "user"
            assertSimpleQueryAttributes(measurement.attributes());
        }
        // Two explicitly named dotted indices -> also "user".
        {
            SearchResponse searchResponse = client().prepareSearch(".kibana", ".whatever").setQuery(simpleQueryStringQuery("foo")).get();
            try {
                assertNoFailures(searchResponse);
                assertSearchHits(searchResponse);
            } finally {
                searchResponse.decRef();
            }
            List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
            assertEquals(4, measurements.size());
            Measurement measurement = measurements.getLast();
            assertEquals(searchResponse.getTook().millis(), measurement.getLong());
            // two dotted indices: categorized as "user"
            assertSimpleQueryAttributes(measurement.attributes());
        }
        // One existing plus one missing index (lenient): categorization is based on the surviving index.
        {
            SearchResponse searchResponse = client().prepareSearch(".kibana", ".does_not_exist")
                .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN)
                .setQuery(simpleQueryStringQuery("foo"))
                .get();
            try {
                assertNoFailures(searchResponse);
                assertSearchHits(searchResponse);
            } finally {
                searchResponse.decRef();
            }
            List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
            assertEquals(5, measurements.size());
            Measurement measurement = measurements.getLast();
            assertEquals(searchResponse.getTook().millis(), measurement.getLong());
            Map<String, Object> attributes = measurement.attributes();
            assertEquals(3, attributes.size());
            // because the second index does not exist, yet the search goes through, the remaining index is categorized correctly
            assertEquals(".kibana", attributes.get("target"));
            assertEquals("hits_only", attributes.get("query_type"));
            assertEquals("_score", attributes.get("sort"));
        }
        // "_all" expands across every index, dotted or not -> "user".
        {
            SearchResponse searchResponse = client().prepareSearch("_all").setQuery(simpleQueryStringQuery("foo")).get();
            try {
                assertNoFailures(searchResponse);
                assertSearchHits(searchResponse, "1", "2", "1", "2", "10", "11", "12", "13", "14", "15");
            } finally {
                searchResponse.decRef();
            }
            List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
            assertEquals(6, measurements.size());
            Measurement measurement = measurements.getLast();
            assertEquals(searchResponse.getTook().millis(), measurement.getLong());
            assertSimpleQueryAttributes(measurement.attributes());
        }
    }
    /**
     * Searching a dotted index name that does not exist (with lenient options, so the request
     * still succeeds) records a measurement categorized under "user" rather than ".others".
     */
    public void testIndexNameMustExist() {
        SearchResponse searchResponse = client().prepareSearch(".must_exist")
            .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN)
            .setQuery(simpleQueryStringQuery("foo"))
            .get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse);
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        // Edge case: rather than falling under ".others" (as a dotted index name normally would), the
        // search is categorized under "user" because no existing indices end up being targeted.
        assertSimpleQueryAttributes(measurement.attributes());
    }
public void testSimpleQuery() {
SearchResponse searchResponse = client().prepareSearch(indexName).setQuery(simpleQueryStringQuery("foo")).get();
try {
assertNoFailures(searchResponse);
assertSearchHits(searchResponse, "1", "2");
} finally {
searchResponse.decRef();
}
List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
assertEquals(1, measurements.size());
Measurement measurement = measurements.getFirst();
assertEquals(searchResponse.getTook().millis(), measurement.getLong());
assertSimpleQueryAttributes(measurement.attributes());
}
    /**
     * A wildcard expression resolving to user indices is recorded with the same default
     * attributes as an explicit index name (target=user).
     */
    public void testSimpleQueryAgainstWildcardExpression() {
        SearchResponse searchResponse = client().prepareSearch("test*").setQuery(simpleQueryStringQuery("foo")).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        assertSimpleQueryAttributes(measurement.attributes());
    }
    /**
     * A dotted alias (".alias") pointing at a regular user index is categorized under "user":
     * categorization follows the concrete index behind the alias, not the alias name.
     */
    public void testSimpleQueryAgainstAlias() {
        IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(
            RestUtils.REST_MASTER_TIMEOUT_DEFAULT,
            new TimeValue(30, TimeUnit.SECONDS)
        );
        indicesAliasesRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add().indices(indexName).alias(".alias"));
        IndicesAliasesResponse indicesAliasesResponse = client().admin().indices().aliases(indicesAliasesRequest).actionGet();
        assertFalse(indicesAliasesResponse.hasErrors());
        SearchResponse searchResponse = client().prepareSearch(".alias").setQuery(simpleQueryStringQuery("foo")).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        assertSimpleQueryAttributes(measurement.attributes());
    }
private static void assertSimpleQueryAttributes(Map<String, Object> attributes) {
assertEquals(3, attributes.size());
assertEquals("user", attributes.get("target"));
assertEquals("hits_only", attributes.get("query_type"));
assertEquals("_score", attributes.get("sort"));
}
    /**
     * A compound retriever (rescorer wrapping a standard retriever) runs its inner search as a
     * separate async action, so two took-time measurements are recorded; both carry the "pit"
     * attribute in addition to the defaults.
     */
    public void testCompoundRetriever() {
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.retriever(
            new RescorerRetrieverBuilder(
                new StandardRetrieverBuilder(new MatchAllQueryBuilder()),
                List.of(new QueryRescorerBuilder(new MatchAllQueryBuilder()))
            )
        );
        SearchResponse searchResponse = client().prepareSearch(indexName).setSource(searchSourceBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        // compound retriever does its own search as an async action, whose took time is recorded separately
        assertEquals(2, measurements.size());
        // The inner search's took time can never exceed the overall took time.
        assertThat(measurements.getFirst().getLong(), Matchers.lessThanOrEqualTo(searchResponse.getTook().millis()));
        assertEquals(searchResponse.getTook().millis(), measurements.getLast().getLong());
        for (Measurement measurement : measurements) {
            Map<String, Object> attributes = measurement.attributes();
            assertEquals(4, attributes.size());
            assertEquals("user", attributes.get("target"));
            assertEquals("hits_only", attributes.get("query_type"));
            assertEquals("_score", attributes.get("sort"));
            assertEquals("pit", attributes.get("pit_scroll"));
        }
    }
    /**
     * Each item of a multi-search records its own took-time measurement with the default attributes.
     * Measurements are not necessarily recorded in request order, so both the response took times and
     * the measurements are sorted before being compared pairwise.
     */
    public void testMultiSearch() {
        MultiSearchRequestBuilder multiSearchRequestBuilder = client().prepareMultiSearch();
        int numSearchRequests = randomIntBetween(3, 10);
        for (int i = 0; i < numSearchRequests; i++) {
            SearchRequest searchRequest = new SearchRequest();
            searchRequest.source(new SearchSourceBuilder().query(simpleQueryStringQuery("foo")));
            multiSearchRequestBuilder.add(searchRequest);
        }
        List<Long> tookTimes;
        MultiSearchResponse multiSearchResponse = null;
        try {
            multiSearchResponse = multiSearchRequestBuilder.get();
            tookTimes = Arrays.stream(multiSearchResponse.getResponses())
                .map(item -> item.getResponse().getTook().millis())
                .sorted()
                .toList();
        } finally {
            if (multiSearchResponse != null) {
                multiSearchResponse.decRef();
            }
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(numSearchRequests, measurements.size());
        measurements.sort(Comparator.comparing(Measurement::getLong));
        int i = 0;
        for (Measurement measurement : measurements) {
            assertEquals(tookTimes.get(i++).longValue(), measurement.getLong());
            assertSimpleQueryAttributes(measurement.attributes());
        }
    }
    /**
     * The initial scroll search records the full attribute set (including pit_scroll=scroll), while
     * each scroll continuation records only query_type=scroll. The meter is reset after every
     * response so each callback invocation sees exactly one measurement.
     */
    public void testScroll() {
        assertScrollResponsesAndHitCount(
            client(),
            TimeValue.timeValueSeconds(60),
            client().prepareSearch(indexName).setSize(1).setQuery(simpleQueryStringQuery("foo")),
            2,
            (respNum, response) -> {
                // respNum == 1 is the initial search; subsequent responses are scroll continuations.
                if (respNum == 1) {
                    List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(
                        TOOK_DURATION_TOTAL_HISTOGRAM_NAME
                    );
                    assertEquals(1, measurements.size());
                    Measurement measurement = measurements.getFirst();
                    Map<String, Object> attributes = measurement.attributes();
                    assertEquals(4, attributes.size());
                    assertEquals("user", attributes.get("target"));
                    assertEquals("hits_only", attributes.get("query_type"));
                    assertEquals("scroll", attributes.get("pit_scroll"));
                    assertEquals("_score", attributes.get("sort"));
                } else {
                    List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(
                        TOOK_DURATION_TOTAL_HISTOGRAM_NAME
                    );
                    assertEquals(1, measurements.size());
                    Measurement measurement = measurements.getFirst();
                    Map<String, Object> attributes = measurement.attributes();
                    // Scroll continuations carry only the query_type attribute.
                    assertEquals(1, attributes.size());
                    assertEquals("scroll", attributes.get("query_type"));
                }
                resetMeter();
            }
        );
    }
    /**
     * Ensures that even when the can_match phase rewrites the query to match_none and every shard is
     * skipped, the {@code @timestamp} range filter is extracted before the rewrite and recorded as the
     * {@code time_range_filter_field} attribute.
     */
    public void testTimeRangeFilterNoResults() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from("2025-01-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setPreFilterShardSize(1).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse);
            // can match kicked in, query got rewritten to match_none, yet we extracted the time range before rewrite
            assertEquals(searchResponse.getSkippedShards(), searchResponse.getTotalShards());
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(4, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("_score", attributes.get("sort"));
        assertEquals("@timestamp", attributes.get("time_range_filter_field"));
        // No shards were queried, so no range bounds were parsed: the "time_range_filter_from"
        // attribute is absent (4 attributes instead of the usual 5).
    }
    /**
     * Ensures that even with can_match and query rewrite in play, the time range filter survives:
     * a range whose bounds match everything gets rewritten on the shards, yet the parsed filter is
     * still propagated to the telemetry attributes of the search response.
     */
    public void testTimeRangeFilterAllResults() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from("2024-10-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setPreFilterShardSize(1).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        // in this case the range query gets rewritten to a range query with open bounds on the shards. Here we test that query rewrite
        // is able to grab the parsed range filter and propagate it all the way to the search response
        assertTimeRangeAttributes(measurement.attributes());
    }
    /**
     * A time range filter that matches only some documents is recorded with the standard
     * time-range attributes (field and coarse-grained "from" bucket).
     */
    public void testTimeRangeFilterOneResult() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from("2024-12-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setPreFilterShardSize(1).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        assertTimeRangeAttributes(measurement.attributes());
    }
private static void assertTimeRangeAttributes(Map<String, Object> attributes) {
assertEquals(5, attributes.size());
assertEquals("user", attributes.get("target"));
assertEquals("hits_only", attributes.get("query_type"));
assertEquals("_score", attributes.get("sort"));
assertEquals("@timestamp", attributes.get("time_range_filter_field"));
assertEquals("older_than_14_days", attributes.get("time_range_filter_from"));
}
    /**
     * A range filter on "event.ingested" (instead of "@timestamp") is recorded with that field name
     * in the time_range_filter_field attribute.
     */
    public void testTimeRangeFilterAllResultsFilterOnEventIngested() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("event.ingested").from("2024-10-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setPreFilterShardSize(1).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(5, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("_score", attributes.get("sort"));
        assertEquals("event.ingested", attributes.get("time_range_filter_field"));
        assertEquals("older_than_14_days", attributes.get("time_range_filter_from"));
    }
    /**
     * When both "@timestamp" and "event.ingested" carry range filters, the recorded field attribute
     * is the combined marker "@timestamp_AND_event.ingested".
     */
    public void testTimeRangeFilterAllResultsFilterOnEventIngestedAndTimestamp() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("event.ingested").from("2024-10-01"));
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from("2024-10-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setPreFilterShardSize(1).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(5, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("_score", attributes.get("sort"));
        assertEquals("@timestamp_AND_event.ingested", attributes.get("time_range_filter_field"));
        assertEquals("older_than_14_days", attributes.get("time_range_filter_from"));
    }
    /**
     * A recent lower bound (10 minutes ago) on a single-shard index (query-and-fetch) is bucketed
     * into the "15_minutes" time_range_filter_from category; sorting on @timestamp is reflected in
     * the "sort" attribute.
     */
    public void testTimeRangeFilterOneResultQueryAndFetchRecentTimestamps() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from(FORMATTER_MILLIS.format(NOW.minusMinutes(10))));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(singleShardIndexName)
            .setQuery(boolQueryBuilder)
            .addSort(new FieldSortBuilder("@timestamp"))
            .get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(5, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("@timestamp", attributes.get("sort"));
        assertEquals("@timestamp", attributes.get("time_range_filter_field"));
        assertEquals("15_minutes", attributes.get("time_range_filter_from"));
    }
    /**
     * With multiple @timestamp range clauses, the telemetry takes the lowest bound among must/filter
     * clauses (20 minutes ago -> "1_hour" bucket), while should and must_not clauses are ignored.
     */
    public void testMultipleTimeRangeFiltersQueryAndFetchRecentTimestamps() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        // we take the lowest of the two bounds
        boolQueryBuilder.must(new RangeQueryBuilder("@timestamp").from(FORMATTER_MILLIS.format(NOW.minusMinutes(20))));
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from(FORMATTER_MILLIS.format(NOW.minusMinutes(10))));
        // should and must_not get ignored
        boolQueryBuilder.should(new RangeQueryBuilder("@timestamp").from(FORMATTER_MILLIS.format(NOW.minusMinutes(2))));
        boolQueryBuilder.mustNot(new RangeQueryBuilder("@timestamp").from(FORMATTER_MILLIS.format(NOW.minusMinutes(1))));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(singleShardIndexName)
            .setQuery(boolQueryBuilder)
            .addSort(new FieldSortBuilder("@timestamp"))
            .get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(5, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("@timestamp", attributes.get("sort"));
        assertEquals("@timestamp", attributes.get("time_range_filter_field"));
        assertEquals("1_hour", attributes.get("time_range_filter_from"));
    }
    /**
     * A range query in a "should" clause does not count as a time range filter: only the default
     * simple-query attributes are recorded (no time_range_filter_* attributes).
     */
    public void testTimeRangeFilterAllResultsShouldClause() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.should(new RangeQueryBuilder("@timestamp").from("2024-10-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "2");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        assertSimpleQueryAttributes(measurement.attributes());
    }
    /**
     * A range query in a "must_not" clause does not count as a time range filter: only the default
     * simple-query attributes are recorded.
     */
    public void testTimeRangeFilterOneResultMustNotClause() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.mustNot(new RangeQueryBuilder("@timestamp").from("2024-12-01"));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexName).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        assertSimpleQueryAttributes(measurement.attributes());
    }
    /**
     * Time range extraction also works against a date_nanos @timestamp field; a 20-minutes-ago lower
     * bound is bucketed into the "1_hour" category.
     */
    public void testTimeRangeFilterAllResultsNanoPrecision() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from(FORMATTER_NANOS.format(NOW.minusMinutes(20))));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(indexNameNanoPrecision).setQuery(boolQueryBuilder).get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "10", "11", "12", "13", "14");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(5, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("_score", attributes.get("sort"));
        assertEquals("@timestamp", attributes.get("time_range_filter_field"));
        assertEquals("1_hour", attributes.get("time_range_filter_from"));
    }
    /**
     * A single search spanning a millis-precision and a nanos-precision index still records one
     * consistent set of time-range attributes.
     */
    public void testTimeRangeFilterAllResultsMixedPrecision() {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new RangeQueryBuilder("@timestamp").from(FORMATTER_NANOS.format(NOW.minusMinutes(20))));
        boolQueryBuilder.must(simpleQueryStringQuery("foo"));
        SearchResponse searchResponse = client().prepareSearch(singleShardIndexName, indexNameNanoPrecision)
            .setQuery(boolQueryBuilder)
            .get();
        try {
            assertNoFailures(searchResponse);
            assertSearchHits(searchResponse, "1", "10", "11", "12", "13", "14");
        } finally {
            searchResponse.decRef();
        }
        List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
        assertEquals(1, measurements.size());
        Measurement measurement = measurements.getFirst();
        assertEquals(searchResponse.getTook().millis(), measurement.getLong());
        Map<String, Object> attributes = measurement.attributes();
        assertEquals(5, attributes.size());
        assertEquals("user", attributes.get("target"));
        assertEquals("hits_only", attributes.get("query_type"));
        assertEquals("_score", attributes.get("sort"));
        assertEquals("@timestamp", attributes.get("time_range_filter_field"));
        assertEquals("1_hour", attributes.get("time_range_filter_from"));
    }
public void testStandardRetrieverWithTimeRangeQuery() {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.retriever(new StandardRetrieverBuilder(new RangeQueryBuilder("event.ingested").from("2024-12-01")));
SearchResponse searchResponse = client().prepareSearch(indexName).setSource(searchSourceBuilder).get();
try {
assertNoFailures(searchResponse);
assertSearchHits(searchResponse, "2");
} finally {
searchResponse.decRef();
}
List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
assertEquals(1, measurements.size());
assertThat(measurements.getFirst().getLong(), Matchers.lessThanOrEqualTo(searchResponse.getTook().millis()));
assertEquals(searchResponse.getTook().millis(), measurements.getLast().getLong());
for (Measurement measurement : measurements) {
Map<String, Object> attributes = measurement.attributes();
assertEquals(5, attributes.size());
assertEquals("user", attributes.get("target"));
assertEquals("hits_only", attributes.get("query_type"));
assertEquals("_score", attributes.get("sort"));
assertEquals("event.ingested", attributes.get("time_range_filter_field"));
assertEquals("older_than_14_days", attributes.get("time_range_filter_from"));
}
}
public void testCompoundRetrieverWithTimeRangeQuery() {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.retriever(
new RescorerRetrieverBuilder(
new StandardRetrieverBuilder(new RangeQueryBuilder("@timestamp").from("2024-12-01")),
List.of(new QueryRescorerBuilder(new MatchAllQueryBuilder()))
)
);
SearchResponse searchResponse = client().prepareSearch(indexName).setSource(searchSourceBuilder).get();
try {
assertNoFailures(searchResponse);
assertSearchHits(searchResponse, "2");
} finally {
searchResponse.decRef();
}
List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(TOOK_DURATION_TOTAL_HISTOGRAM_NAME);
// compound retriever does its own search as an async action, whose took time is recorded separately
assertEquals(2, measurements.size());
assertThat(measurements.getFirst().getLong(), Matchers.lessThan(searchResponse.getTook().millis()));
assertEquals(searchResponse.getTook().millis(), measurements.getLast().getLong());
for (Measurement measurement : measurements) {
Map<String, Object> attributes = measurement.attributes();
assertEquals(6, attributes.size());
assertEquals("user", attributes.get("target"));
assertEquals("hits_only", attributes.get("query_type"));
assertEquals("_score", attributes.get("sort"));
assertEquals("pit", attributes.get("pit_scroll"));
assertEquals("@timestamp", attributes.get("time_range_filter_field"));
assertEquals("older_than_14_days", attributes.get("time_range_filter_from"));
}
}
    // Drops every measurement recorded so far by the test telemetry plugin.
    private void resetMeter() {
        getTestTelemetryPlugin().resetMeter();
    }
private TestTelemetryPlugin getTestTelemetryPlugin() {
return getInstanceFromNode(PluginsService.class).filterPlugins(TestTelemetryPlugin.class).toList().getFirst();
}
}
| SearchTookTimeTelemetryTests |
java | apache__camel | tooling/maven/camel-eip-documentation-enricher-maven-plugin/src/main/java/org/apache/camel/maven/Constants.java | {
"start": 891,
"end": 1548
} | class ____ {
// Camel core constants.
public static final String DEFAULT_XML_INTENTION = " ";
public static final int WRAP_LENGTH = 120;
// XML constants.
public static final String XML_SCHEMA_NAMESPACE_PREFIX = "xs";
public static final String XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema";
public static final String NAME_ATTRIBUTE_NAME = "name";
public static final String TYPE_ATTRIBUTE_NAME = "type";
public static final String XS_ANNOTATION_ELEMENT_NAME = "xs:annotation";
public static final String XS_DOCUMENTATION_ELEMENT_NAME = "xs:documentation";
private Constants() {
}
}
| Constants |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/ser/YearSerTest.java | {
"start": 1091,
"end": 2724
} | class ____ {
@JsonFormat(shape = JsonFormat.Shape.STRING)
public Year value;
public YearAsStringWrapper(Year value) {
this.value = value;
}
}
// Defaults fine: year only serialized as String with explicit
// overrides
private final ObjectMapper MAPPER = newMapper();
@Test
public void testDefaultSerialization() throws Exception
{
assertEquals("1986",
MAPPER.writeValueAsString(Year.of(1986)));
assertEquals("2013",
MAPPER.writeValueAsString(Year.of(2013)));
}
@Test
public void testAsStringSerializationViaAnnotation() throws Exception
{
assertEquals(a2q("{'value':'1972'}"),
MAPPER.writeValueAsString(new YearAsStringWrapper(Year.of(1972))));
}
@Test
public void testAsStringSerializationViaFormatConfig() throws Exception
{
final ObjectMapper asStringMapper = mapperBuilder()
.withConfigOverride(Year.class, o -> o.setFormat(
JsonFormat.Value.forShape(JsonFormat.Shape.STRING)))
.build();
assertEquals(q("2025"),
asStringMapper.writeValueAsString(Year.of(2025)));
}
@Test
public void testSerializationWithTypeInfo() throws Exception
{
ObjectMapper mapper = newMapperBuilder()
.addMixIn(Temporal.class, MockObjectConfiguration.class)
.build();
String value = mapper.writeValueAsString(Year.of(2005));
assertEquals("[\"" + Year.class.getName() + "\",2005]", value);
}
}
| YearAsStringWrapper |
java | apache__camel | components/camel-telegram/src/test/java/org/apache/camel/component/telegram/TelegramConsumerMediaVideoTest.java | {
"start": 1707,
"end": 3546
} | class ____ extends TelegramTestSupport {
@EndpointInject("mock:telegram")
private MockEndpoint endpoint;
@Test
public void testReceptionOfAMessageWithAVideo() throws Exception {
endpoint.expectedMinimumMessageCount(1);
endpoint.assertIsSatisfied(5000);
Exchange mediaExchange = endpoint.getExchanges().get(0);
IncomingMessage msg = mediaExchange.getIn().getBody(IncomingMessage.class);
IncomingVideo video = msg.getVideo();
assertNotNull(video);
assertEquals(Integer.valueOf(2), video.getDurationSeconds());
assertEquals(Integer.valueOf(360), video.getHeight());
assertEquals(Integer.valueOf(640), video.getWidth());
assertEquals(Long.valueOf(299284), video.getFileSize());
assertEquals("BAADBAADAgADyzvwCC7_4AyvdAXXXX", video.getFileId());
IncomingPhotoSize thumb = video.getThumb();
assertNotNull(thumb);
}
@Override
protected RoutesBuilder[] createRouteBuilders() {
return new RoutesBuilder[] {
getMockRoutes(),
new RouteBuilder() {
@Override
public void configure() {
from("telegram:bots?authorizationToken=mock-token")
.to("mock:telegram");
}
} };
}
@Override
protected TelegramMockRoutes createMockRoutes() {
return new TelegramMockRoutes(port)
.addEndpoint(
"getUpdates",
"GET",
String.class,
TelegramTestUtil.stringResource("messages/updates-media-video.json"),
TelegramTestUtil.stringResource("messages/updates-empty.json"));
}
}
| TelegramConsumerMediaVideoTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityTestsUtils.java | {
"start": 1120,
"end": 6095
} | class ____ {
private SecurityTestsUtils() {}
public static void assertAuthenticationException(ElasticsearchSecurityException e) {
assertThat(e.status(), is(RestStatus.UNAUTHORIZED));
// making sure it's not a license expired exception
assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), nullValue());
assertContainsWWWAuthenticateHeader(e);
}
public static void assertAuthenticationException(ElasticsearchSecurityException e, Matcher<String> messageMatcher) {
assertAuthenticationException(e);
assertThat(e.getMessage(), messageMatcher);
}
public static void assertThrowsAuthorizationException(LuceneTestCase.ThrowingRunnable throwingRunnable, String action, String user) {
assertThrowsAuthorizationException(null, throwingRunnable, action, user);
}
public static void assertThrowsAuthorizationException(
String context,
LuceneTestCase.ThrowingRunnable throwingRunnable,
String action,
String user
) {
String message = "Expected authorization failure for user=[" + user + "], action=[" + action + "]";
if (Strings.hasText(context)) {
message += " - " + context;
}
assertThrowsAuthorizationException(
message,
throwingRunnable,
containsString("[" + action + "] is unauthorized for user [" + user + "]")
);
}
public static void assertThrowsAuthorizationExceptionRunAsDenied(
LuceneTestCase.ThrowingRunnable throwingRunnable,
String action,
User authenticatingUser,
String runAs
) {
assertThrowsAuthorizationException(
"Expected authorization failure for user=["
+ authenticatingUser.principal()
+ "], run-as=["
+ runAs
+ "], action=["
+ action
+ "]",
throwingRunnable,
containsString(
"action ["
+ action
+ "] is unauthorized for user ["
+ authenticatingUser.principal()
+ "]"
+ String.format(
Locale.ROOT,
" with effective roles [%s]",
Strings.arrayToCommaDelimitedString(authenticatingUser.roles())
)
+ ", because user ["
+ authenticatingUser.principal()
+ "] is unauthorized to run as ["
+ runAs
+ "]"
)
);
}
public static void assertThrowsAuthorizationExceptionRunAsUnauthorizedAction(
LuceneTestCase.ThrowingRunnable throwingRunnable,
String action,
String user,
String runAs
) {
assertThrowsAuthorizationException(
"Expected authorization failure for user=[" + user + "], run-as=[" + runAs + "], action=[" + action + "]",
throwingRunnable,
containsString("[" + action + "] is unauthorized for user [" + user + "] run as [" + runAs + "]")
);
}
public static void assertThrowsAuthorizationExceptionDefaultUsers(LuceneTestCase.ThrowingRunnable throwingRunnable, String action) {
ElasticsearchSecurityException exception = expectThrows(ElasticsearchSecurityException.class, throwingRunnable);
assertAuthorizationExceptionDefaultUsers(exception, action);
}
public static void assertAuthorizationExceptionDefaultUsers(Throwable throwable, String action) {
assertAuthorizationException(
throwable,
either(containsString("[" + action + "] is unauthorized for user [" + SecuritySettingsSource.TEST_USER_NAME + "]")).or(
containsString(
"[" + action + "] is unauthorized for user [" + SecuritySettingsSource.DEFAULT_TRANSPORT_CLIENT_USER_NAME + "]"
)
)
);
}
public static void assertThrowsAuthorizationException(
String failureMessageIfNoException,
LuceneTestCase.ThrowingRunnable throwingRunnable,
Matcher<String> messageMatcher
) {
ElasticsearchSecurityException securityException = expectThrows(
ElasticsearchSecurityException.class,
failureMessageIfNoException,
throwingRunnable
);
assertAuthorizationException(securityException, messageMatcher);
}
private static void assertAuthorizationException(Throwable throwable, Matcher<String> messageMatcher) {
assertThat(throwable, instanceOf(ElasticsearchSecurityException.class));
ElasticsearchSecurityException securityException = (ElasticsearchSecurityException) throwable;
assertThat(securityException.status(), is(RestStatus.FORBIDDEN));
assertThat(throwable.getMessage(), messageMatcher);
}
}
| SecurityTestsUtils |
java | apache__flink | flink-libraries/flink-state-processing-api/src/test/java/org/apache/flink/state/api/SavepointReaderUidHashITCase.java | {
"start": 1197,
"end": 2639
} | class ____ extends SavepointReaderITTestBase {
private static final ListStateDescriptor<Integer> list =
new ListStateDescriptor<>(LIST_NAME, Types.INT);
private static final ListStateDescriptor<Integer> union =
new ListStateDescriptor<>(UNION_NAME, Types.INT);
private static final MapStateDescriptor<Integer, String> broadcast =
new MapStateDescriptor<>(BROADCAST_NAME, Types.INT, Types.STRING);
public SavepointReaderUidHashITCase() {
super(list, union, broadcast);
}
@Override
public DataStream<Integer> readListState(SavepointReader savepoint) throws IOException {
return savepoint.readListState(getUidHashFromUid(UID), LIST_NAME, Types.INT);
}
@Override
public DataStream<Integer> readUnionState(SavepointReader savepoint) throws IOException {
return savepoint.readUnionState(getUidHashFromUid(UID), UNION_NAME, Types.INT);
}
@Override
public DataStream<Tuple2<Integer, String>> readBroadcastState(SavepointReader savepoint)
throws IOException {
return savepoint.readBroadcastState(
getUidHashFromUid(UID), BROADCAST_NAME, Types.INT, Types.STRING);
}
private static OperatorIdentifier getUidHashFromUid(String uid) {
return OperatorIdentifier.forUidHash(
OperatorIdentifier.forUid(uid).getOperatorId().toHexString());
}
}
| SavepointReaderUidHashITCase |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/TcpDnsQueryContext.java | {
"start": 1141,
"end": 2159
} | class ____ extends DnsQueryContext {
TcpDnsQueryContext(Channel channel,
InetSocketAddress nameServerAddr,
DnsQueryContextManager queryContextManager,
DnsQueryLifecycleObserver lifecycleObserver,
int maxPayLoadSize, boolean recursionDesired,
long queryTimeoutMillis,
DnsQuestion question, DnsRecord[] additionals,
Promise<AddressedEnvelope<DnsResponse, InetSocketAddress>> promise) {
super(channel, nameServerAddr, queryContextManager, lifecycleObserver, maxPayLoadSize, recursionDesired,
// No retry via TCP.
queryTimeoutMillis, question, additionals, promise, null, false);
}
@Override
protected DnsQuery newQuery(int id, InetSocketAddress nameServerAddr) {
return new DefaultDnsQuery(id);
}
@Override
protected String protocol() {
return "TCP";
}
}
| TcpDnsQueryContext |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/ForStMemoryControllerUtils.java | {
"start": 1216,
"end": 8035
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(ForStMemoryControllerUtils.class);
/**
* Allocate memory controllable ForSt shared resources.
*
* @param totalMemorySize The total memory limit size.
* @param writeBufferRatio The ratio of total memory which is occupied by write buffer manager.
* @param highPriorityPoolRatio The high priority pool ratio of cache.
* @param factory creates Write Buffer Manager and Bock Cache
* @return memory controllable RocksDB shared resources.
*/
public static ForStSharedResources allocateForStSharedResources(
long totalMemorySize,
double writeBufferRatio,
double highPriorityPoolRatio,
boolean usingPartitionedIndexFilters,
ForStMemoryFactory factory) {
long calculatedCacheCapacity =
ForStMemoryControllerUtils.calculateActualCacheCapacity(
totalMemorySize, writeBufferRatio);
final Cache cache = factory.createCache(calculatedCacheCapacity, highPriorityPoolRatio);
long writeBufferManagerCapacity =
ForStMemoryControllerUtils.calculateWriteBufferManagerCapacity(
totalMemorySize, writeBufferRatio);
final WriteBufferManager wbm =
factory.createWriteBufferManager(writeBufferManagerCapacity, cache);
LOG.debug(
"Allocated ForSt shared resources, calculatedCacheCapacity: {}, highPriorityPoolRatio: {}, writeBufferManagerCapacity: {}, usingPartitionedIndexFilters: {}",
calculatedCacheCapacity,
highPriorityPoolRatio,
writeBufferManagerCapacity,
usingPartitionedIndexFilters);
return new ForStSharedResources(
cache, wbm, writeBufferManagerCapacity, usingPartitionedIndexFilters);
}
/**
* Calculate the actual memory capacity of cache, which would be shared among ForSt instance(s).
* We introduce this method because: a) We cannot create a strict capacity limit cache util
* FLINK-15532 resolved. b) Regardless of the memory usage of blocks pinned by ForSt iterators,
* which is difficult to calculate and only happened when we iterator entries in MapState, the
* overuse of memory is mainly occupied by at most half of the write buffer usage. (see <a
* href="https://github.com/dataArtisans/frocksdb/blob/958f191d3f7276ae59b270f9db8390034d549ee0/include/rocksdb/write_buffer_manager.h#L51">the
* flush implementation of write buffer manager</a>). Thus, we have four equations below:
* write_buffer_manager_memory = 1.5 * write_buffer_manager_capacity write_buffer_manager_memory
* = total_memory_size * write_buffer_ratio write_buffer_manager_memory + other_part =
* total_memory_size write_buffer_manager_capacity + other_part = cache_capacity And we would
* deduce the formula: cache_capacity = (3 - write_buffer_ratio) * total_memory_size / 3
* write_buffer_manager_capacity = 2 * total_memory_size * write_buffer_ratio / 3
*
* @param totalMemorySize Total off-heap memory size reserved for ForSt instance(s).
* @param writeBufferRatio The ratio of total memory size which would be reserved for write
* buffer manager and its over-capacity part.
* @return The actual calculated cache capacity.
*/
@VisibleForTesting
public static long calculateActualCacheCapacity(long totalMemorySize, double writeBufferRatio) {
return (long) ((3 - writeBufferRatio) * totalMemorySize / 3);
}
/**
* Calculate the actual memory capacity of write buffer manager, which would be shared among
* ForSt instance(s). The formula to use here could refer to the doc of {@link
* #calculateActualCacheCapacity(long, double)}.
*
* @param totalMemorySize Total off-heap memory size reserved for ForSt instance(s).
* @param writeBufferRatio The ratio of total memory size which would be reserved for write
* buffer manager and its over-capacity part.
* @return The actual calculated write buffer manager capacity.
*/
@VisibleForTesting
static long calculateWriteBufferManagerCapacity(long totalMemorySize, double writeBufferRatio) {
return (long) (2 * totalMemorySize * writeBufferRatio / 3);
}
@VisibleForTesting
static Cache createCache(long cacheCapacity, double highPriorityPoolRatio) {
// TODO use strict capacity limit until FLINK-15532 resolved
return new LRUCache(cacheCapacity, -1, false, highPriorityPoolRatio);
}
@VisibleForTesting
static WriteBufferManager createWriteBufferManager(
long writeBufferManagerCapacity, Cache cache) {
return new WriteBufferManager(writeBufferManagerCapacity, cache);
}
/**
* Calculate the default arena block size as ForSt calculates it in <a
* href="https://github.com/dataArtisans/frocksdb/blob/49bc897d5d768026f1eb816d960c1f2383396ef4/db/column_family.cc#L196-L201">
* here</a>.
*
* @return the default arena block size
* @param writeBufferSize the write buffer size (bytes)
*/
static long calculateForStDefaultArenaBlockSize(long writeBufferSize) {
long arenaBlockSize = writeBufferSize / 8;
// Align up to 4k
final long align = 4 * 1024;
return ((arenaBlockSize + align - 1) / align) * align;
}
/**
* Calculate {@code mutable_limit_} as ForSt calculates it in <a
* href="https://github.com/dataArtisans/frocksdb/blob/FRocksDB-5.17.2/memtable/write_buffer_manager.cc#L54">
* here</a>.
*
* @param bufferSize write buffer size
* @return mutableLimit
*/
static long calculateForStMutableLimit(long bufferSize) {
return bufferSize * 7 / 8;
}
/**
* ForSt starts flushing the active memtable constantly in the case when the arena block size is
* greater than mutable limit (as calculated in {@link #calculateForStMutableLimit(long)}).
*
* <p>This happens because in such a case the check <a
* href="https://github.com/dataArtisans/frocksdb/blob/958f191d3f7276ae59b270f9db8390034d549ee0/include/rocksdb/write_buffer_manager.h#L47">
* here</a> is always true.
*
* <p>This method checks that arena block size is smaller than mutable limit.
*
* @param arenaBlockSize Arena block size
* @param mutableLimit mutable limit
* @return whether arena block size is sensible
*/
@VisibleForTesting
static boolean validateArenaBlockSize(long arenaBlockSize, long mutableLimit) {
return arenaBlockSize <= mutableLimit;
}
/** Factory for Write Buffer Manager and Bock Cache. */
public | ForStMemoryControllerUtils |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/BindToRegistryBeanPostProcessorTest.java | {
"start": 1256,
"end": 2184
} | class ____ extends ContextTestSupport {
// field
@BindToRegistry(beanPostProcess = true)
private final FooService foo = new FooService();
// method
@BindToRegistry(beanPostProcess = true)
public FooService myOtherFoo() {
return new FooService();
}
@Test
public void testPostProcessor() throws Exception {
// bean post processing dont run on ContextTestSupport
CamelBeanPostProcessor cbpp = PluginHelper.getBeanPostProcessor(context);
cbpp.postProcessBeforeInitialization(this, "this");
cbpp.postProcessAfterInitialization(this, "this");
assertNotNull(foo);
assertSame(context, foo.getCamelContext());
FooService other = (FooService) context.getRegistry().lookupByName("myOtherFoo");
assertNotNull(other);
assertSame(context, other.getCamelContext());
}
public static | BindToRegistryBeanPostProcessorTest |
java | quarkusio__quarkus | extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientNameBuildItem.java | {
"start": 209,
"end": 703
} | class ____ extends MultiBuildItem {
private final String name;
private final boolean addQualifier;
public MongoClientNameBuildItem(String name) {
this(name, true);
}
public MongoClientNameBuildItem(String name, boolean addQualifier) {
this.name = name;
this.addQualifier = addQualifier;
}
public String getName() {
return name;
}
public boolean isAddQualifier() {
return addQualifier;
}
}
| MongoClientNameBuildItem |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/validators/ParamValidationCheckerTest.java | {
"start": 1478,
"end": 46927
} | class ____ {
@Test(timeout = 30000)
public void checkFlowable() {
checkClass(Flowable.class);
}
@Test(timeout = 30000)
public void checkObservable() {
checkClass(Observable.class);
}
@Test(timeout = 30000)
public void checkSingle() {
checkClass(Single.class);
}
@Test(timeout = 30000)
public void checkMaybe() {
checkClass(Maybe.class);
}
@Test(timeout = 30000)
public void checkCompletable() {
checkClass(Completable.class);
}
@Test(timeout = 30000)
public void checkParallelFlowable() {
checkClass(ParallelFlowable.class);
}
// ---------------------------------------------------------------------------------------
// ---------------------------------------------------------------------------------------
static Map<String, List<ParamOverride>> overrides;
static Map<String, List<ParamIgnore>> ignores;
static Map<Class<?>, Object> defaultValues;
static Map<Class<?>, List<Object>> defaultInstances;
static {
overrides = new HashMap<>();
// ***********************************************************************************************************************
// zero index allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "elementAt", Long.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "elementAt", Long.TYPE, Object.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "elementAtOrError", Long.TYPE));
// negative skip count is ignored
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "skip", Long.TYPE));
// negative skip time is considered as zero skip time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skip", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skip", Long.TYPE, TimeUnit.class, Scheduler.class));
// can start with zero initial request
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "test", Long.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "test", Long.TYPE, Boolean.TYPE));
// negative timeout time is considered as zero timeout time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Publisher.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class, Publisher.class));
// negative buffer time is considered as zero buffer time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Integer.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Scheduler.class, Integer.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Scheduler.class, Integer.TYPE, Supplier.class, Boolean.TYPE));
// negative time/skip is considered zero time/skip
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Supplier.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Supplier.class));
// negative timeout is allowed
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "fromFuture", Future.class, Long.TYPE, TimeUnit.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "interval", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "interval", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class, Scheduler.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "debounce", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "debounce", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "debounce", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// null Action allowed
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "onBackpressureBuffer", Long.TYPE, Action.class, BackpressureOverflowStrategy.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "onBackpressureBuffer", Long.TYPE, Action.class, BackpressureOverflowStrategy.class, Consumer.class));
// zero repeat is allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "repeat", Long.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "replay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "replay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "replay", Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "replay", Integer.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "replay", Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "replay", Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "replay", Function.class, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "replay", Function.class, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "replay", Function.class, Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
addOverride(new ParamOverride(Flowable.class, 2, ParamMode.ANY, "replay", Function.class, Integer.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 2, ParamMode.ANY, "replay", Function.class, Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 2, ParamMode.ANY, "replay", Function.class, Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
// zero retry is allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE, Predicate.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleWithTimeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleWithTimeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleWithTimeout", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "take", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "take", Long.TYPE, TimeUnit.class, Scheduler.class));
// zero take is allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "take", Long.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 1, ParamMode.ANY, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
// take last 0 is allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Integer.TYPE));
// skip last 0 is allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.NON_NEGATIVE, "skipLast", Integer.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleFirst", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleFirst", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleFirst", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLast", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLast", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLast", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Consumer.class));
// negative buffer time is considered as zero buffer time
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Long.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Long.TYPE, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class, Long.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class, Long.TYPE, Boolean.TYPE));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class, Long.TYPE, Boolean.TYPE, Integer.TYPE));
// ***********************************************************************************************************************
// negative timeout time is considered as zero timeout time
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, CompletableSource.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class, CompletableSource.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class, Scheduler.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class, Scheduler.class));
// zero repeat is allowed
addOverride(new ParamOverride(Completable.class, 0, ParamMode.NON_NEGATIVE, "repeat", Long.TYPE));
// zero retry is allowed
addOverride(new ParamOverride(Completable.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE, Predicate.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "blockingAwait", Long.TYPE, TimeUnit.class));
// ***********************************************************************************************************************
// negative timeout time is considered as zero timeout time
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, MaybeSource.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class, MaybeSource.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class, Scheduler.class));
// negative timeout is allowed
addOverride(new ParamOverride(Maybe.class, 1, ParamMode.ANY, "fromFuture", Future.class, Long.TYPE, TimeUnit.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
// zero repeat is allowed
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.NON_NEGATIVE, "repeat", Long.TYPE));
// zero retry is allowed
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE, Predicate.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class, Scheduler.class));
// ***********************************************************************************************************************
// negative timeout time is considered as zero timeout time
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, SingleSource.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class, SingleSource.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class, Scheduler.class));
// negative timeout is allowed
addOverride(new ParamOverride(Single.class, 1, ParamMode.ANY, "fromFuture", Future.class, Long.TYPE, TimeUnit.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
// zero repeat is allowed
addOverride(new ParamOverride(Single.class, 0, ParamMode.NON_NEGATIVE, "repeat", Long.TYPE));
// zero retry is allowed
addOverride(new ParamOverride(Single.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE));
addOverride(new ParamOverride(Single.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE, Predicate.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Single.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class, Scheduler.class));
// ***********************************************************************************************************************
// zero index allowed
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "elementAt", Long.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "elementAt", Long.TYPE, Object.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "elementAtOrError", Long.TYPE));
// negative skip count is ignored
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "skip", Long.TYPE));
// negative skip time is considered as zero skip time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skip", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skip", Long.TYPE, TimeUnit.class, Scheduler.class));
// negative timeout time is considered as zero timeout time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, ObservableSource.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "timeout", Long.TYPE, TimeUnit.class, Scheduler.class, ObservableSource.class));
// negative buffer time is considered as zero buffer time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Integer.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Scheduler.class, Integer.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, TimeUnit.class, Scheduler.class, Integer.TYPE, Supplier.class, Boolean.TYPE));
// negative time/skip is considered zero time/skip
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Supplier.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "buffer", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Supplier.class));
// negative timeout is allowed
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "fromFuture", Future.class, Long.TYPE, TimeUnit.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "timer", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "interval", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "interval", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "interval", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "delay", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "delaySubscription", Long.TYPE, TimeUnit.class, Scheduler.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "debounce", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "debounce", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "debounce", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// zero repeat is allowed
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "repeat", Long.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "replay", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "replay", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "replay", Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "replay", Integer.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "replay", Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "replay", Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "replay", Function.class, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "replay", Function.class, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "replay", Function.class, Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
addOverride(new ParamOverride(Observable.class, 2, ParamMode.ANY, "replay", Function.class, Integer.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 2, ParamMode.ANY, "replay", Function.class, Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 2, ParamMode.ANY, "replay", Function.class, Integer.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, boolean.class));
// zero retry is allowed
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "retry", Long.TYPE, Predicate.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleWithTimeout", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleWithTimeout", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleWithTimeout", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "take", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "take", Long.TYPE, TimeUnit.class, Scheduler.class));
// zero retry is allowed
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "take", Long.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "sample", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "takeLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 1, ParamMode.ANY, "takeLast", Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
// take last 0 is allowed
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "takeLast", Integer.TYPE));
// skip last 0 is allowed
addOverride(new ParamOverride(Observable.class, 0, ParamMode.NON_NEGATIVE, "skipLast", Integer.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "skipLast", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Integer.TYPE));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleFirst", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleFirst", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleFirst", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLast", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLast", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLast", Long.TYPE, TimeUnit.class, Scheduler.class, Consumer.class));
// negative time is considered as zero time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "throttleLatest", Long.TYPE, TimeUnit.class, Scheduler.class, Boolean.TYPE, Consumer.class));
// negative buffer time is considered as zero buffer time
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Long.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Long.TYPE, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class, Long.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class, Long.TYPE, Boolean.TYPE));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "window", Long.TYPE, TimeUnit.class, Scheduler.class, Long.TYPE, Boolean.TYPE, Integer.TYPE));
// null value allowed
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "firstStage", Object.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "singleStage", Object.class));
addOverride(new ParamOverride(Flowable.class, 0, ParamMode.ANY, "lastStage", Object.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "firstStage", Object.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "singleStage", Object.class));
addOverride(new ParamOverride(Observable.class, 0, ParamMode.ANY, "lastStage", Object.class));
addOverride(new ParamOverride(Maybe.class, 0, ParamMode.ANY, "toCompletionStage", Object.class));
addOverride(new ParamOverride(Completable.class, 0, ParamMode.ANY, "toCompletionStage", Object.class));
// -----------------------------------------------------------------------------------
ignores = new HashMap<>();
// needs special param validation due to (long)start + end - 1 <= Integer.MAX_VALUE
addIgnore(new ParamIgnore(Flowable.class, "range", Integer.TYPE, Integer.TYPE));
addIgnore(new ParamIgnore(Flowable.class, "rangeLong", Long.TYPE, Long.TYPE));
addIgnore(new ParamIgnore(Flowable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class));
addIgnore(new ParamIgnore(Flowable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class));
addIgnore(new ParamIgnore(Flowable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addIgnore(new ParamIgnore(Flowable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addIgnore(new ParamIgnore(Flowable.class, "unsafeCreate", Publisher.class));
// needs special param validation due to (long)start + end - 1 <= Integer.MAX_VALUE
addIgnore(new ParamIgnore(Observable.class, "range", Integer.TYPE, Integer.TYPE));
addIgnore(new ParamIgnore(Observable.class, "rangeLong", Long.TYPE, Long.TYPE));
addIgnore(new ParamIgnore(Observable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class));
addIgnore(new ParamIgnore(Observable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class));
addIgnore(new ParamIgnore(Observable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addIgnore(new ParamIgnore(Observable.class, "intervalRange", Long.TYPE, Long.TYPE, Long.TYPE, Long.TYPE, TimeUnit.class, Scheduler.class));
addIgnore(new ParamIgnore(Observable.class, "unsafeCreate", ObservableSource.class));
addIgnore(new ParamIgnore(Maybe.class, "unsafeCreate", MaybeSource.class));
addIgnore(new ParamIgnore(Single.class, "unsafeCreate", SingleSource.class));
addIgnore(new ParamIgnore(Completable.class, "unsafeCreate", CompletableSource.class));
// -----------------------------------------------------------------------------------
defaultValues = new HashMap<>();
defaultValues.put(Publisher.class, new NeverPublisher());
defaultValues.put(Flowable.class, new NeverPublisher());
defaultValues.put(ObservableSource.class, new NeverObservable());
defaultValues.put(Observable.class, new NeverObservable());
defaultValues.put(SingleSource.class, new NeverSingle());
defaultValues.put(Single.class, new NeverSingle());
defaultValues.put(MaybeSource.class, new NeverMaybe());
defaultValues.put(Maybe.class, new NeverMaybe());
defaultValues.put(CompletableSource.class, new NeverCompletable());
defaultValues.put(Completable.class, new NeverCompletable());
defaultValues.put(Action.class, Functions.EMPTY_ACTION);
defaultValues.put(Runnable.class, Functions.EMPTY_RUNNABLE);
defaultValues.put(Consumer.class, Functions.emptyConsumer());
defaultValues.put(LongConsumer.class, Functions.EMPTY_LONG_CONSUMER);
defaultValues.put(Function.class, Functions.justFunction(1));
defaultValues.put(Callable.class, Functions.justCallable(1));
defaultValues.put(Supplier.class, Functions.justSupplier(1));
defaultValues.put(Iterable.class, Collections.emptyList());
defaultValues.put(Object.class, 1);
defaultValues.put(Class.class, Integer.class);
Object af = new AllFunctionals();
for (Class<?> interfaces : AllFunctionals.class.getInterfaces()) {
defaultValues.put(interfaces, af);
}
defaultValues.put(Subscriber.class, af);
defaultValues.put(TimeUnit.class, TimeUnit.SECONDS);
defaultValues.put(Scheduler.class, Schedulers.single());
defaultValues.put(BackpressureStrategy.class, BackpressureStrategy.MISSING);
defaultValues.put(BackpressureOverflowStrategy.class, BackpressureOverflowStrategy.ERROR);
defaultValues.put(Throwable.class, new TestException());
defaultValues.put(Publisher[].class, new Publisher[] { new NeverPublisher(), new NeverPublisher() });
defaultValues.put(ObservableSource[].class, new ObservableSource[] { new NeverObservable(), new NeverObservable() });
defaultValues.put(SingleSource[].class, new SingleSource[] { new NeverSingle(), new NeverSingle() });
defaultValues.put(MaybeSource[].class, new MaybeSource[] { new NeverMaybe(), new NeverMaybe() });
defaultValues.put(CompletableSource[].class, new CompletableSource[] { new NeverCompletable(), new NeverCompletable() });
defaultValues.put(Object[].class, new Object[] { new Object(), new Object() });
defaultValues.put(Future.class, new FutureTask<Object>(Functions.EMPTY_RUNNABLE, 1));
defaultValues.put(ParallelFlowable.class, ParallelFlowable.from(Flowable.never()));
defaultValues.put(Subscriber[].class, new Subscriber[] { new AllFunctionals() });
defaultValues.put(ParallelFailureHandling.class, ParallelFailureHandling.ERROR);
defaultValues.put(DisposableContainer.class, new CompositeDisposable());
// JDK 8 types
defaultValues.put(Optional.class, Optional.of(1));
defaultValues.put(CompletionStage.class, CompletableFuture.completedFuture(1));
defaultValues.put(Stream.class, Stream.of(1, 2, 3));
defaultValues.put(Duration.class, Duration.ofSeconds(1));
defaultValues.put(Collector.class, Collectors.toList());
@SuppressWarnings("rawtypes")
| ParamValidationCheckerTest |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/type/CharacterTypeHandler.java | {
"start": 851,
"end": 1714
} | class ____ extends BaseTypeHandler<Character> {
@Override
public void setNonNullParameter(PreparedStatement ps, int i, Character parameter, JdbcType jdbcType)
throws SQLException {
ps.setString(i, parameter.toString());
}
@Override
public Character getNullableResult(ResultSet rs, String columnName) throws SQLException {
return toCharacter(rs.getString(columnName));
}
@Override
public Character getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
return toCharacter(rs.getString(columnIndex));
}
@Override
public Character getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
return toCharacter(cs.getString(columnIndex));
}
private Character toCharacter(String value) {
return value == null || value.isEmpty() ? null : value.charAt(0);
}
}
| CharacterTypeHandler |
java | apache__dubbo | dubbo-plugin/dubbo-auth/src/test/java/org/apache/dubbo/auth/AccessKeyAuthenticatorTest.java | {
"start": 1700,
"end": 5621
} | class ____ {
@Test
void testSignForRequest() {
URL url = URL.valueOf("dubbo://10.10.10.10:2181")
.addParameter(Constants.ACCESS_KEY_ID_KEY, "ak")
.addParameter(CommonConstants.APPLICATION_KEY, "test")
.addParameter(Constants.SECRET_ACCESS_KEY_KEY, "sk");
Invocation invocation = new RpcInvocation();
AccessKeyAuthenticator helper = mock(AccessKeyAuthenticator.class);
doCallRealMethod().when(helper).sign(invocation, url);
when(helper.getSignature(eq(url), eq(invocation), eq("sk"), anyString()))
.thenReturn("dubbo");
AccessKeyPair accessKeyPair = mock(AccessKeyPair.class);
when(accessKeyPair.getSecretKey()).thenReturn("sk");
when(helper.getAccessKeyPair(invocation, url)).thenReturn(accessKeyPair);
helper.sign(invocation, url);
assertEquals(String.valueOf(invocation.getAttachment(CommonConstants.CONSUMER)), url.getApplication());
assertNotNull(invocation.getAttachments().get(Constants.REQUEST_SIGNATURE_KEY));
assertEquals(invocation.getAttachments().get(Constants.REQUEST_SIGNATURE_KEY), "dubbo");
}
@Test
void testAuthenticateRequest() throws RpcAuthenticationException {
URL url = URL.valueOf("dubbo://10.10.10.10:2181")
.addParameter(Constants.ACCESS_KEY_ID_KEY, "ak")
.addParameter(CommonConstants.APPLICATION_KEY, "test")
.addParameter(Constants.SECRET_ACCESS_KEY_KEY, "sk");
Invocation invocation = new RpcInvocation();
invocation.setAttachment(Constants.ACCESS_KEY_ID_KEY, "ak");
invocation.setAttachment(Constants.REQUEST_SIGNATURE_KEY, "dubbo");
invocation.setAttachment(Constants.REQUEST_TIMESTAMP_KEY, String.valueOf(System.currentTimeMillis()));
invocation.setAttachment(CommonConstants.CONSUMER, "test");
AccessKeyAuthenticator helper = mock(AccessKeyAuthenticator.class);
doCallRealMethod().when(helper).authenticate(invocation, url);
when(helper.getSignature(eq(url), eq(invocation), eq("sk"), anyString()))
.thenReturn("dubbo");
AccessKeyPair accessKeyPair = mock(AccessKeyPair.class);
when(accessKeyPair.getSecretKey()).thenReturn("sk");
when(helper.getAccessKeyPair(invocation, url)).thenReturn(accessKeyPair);
assertDoesNotThrow(() -> helper.authenticate(invocation, url));
}
@Test
void testAuthenticateRequestNoSignature() {
URL url = URL.valueOf("dubbo://10.10.10.10:2181")
.addParameter(Constants.ACCESS_KEY_ID_KEY, "ak")
.addParameter(CommonConstants.APPLICATION_KEY, "test")
.addParameter(Constants.SECRET_ACCESS_KEY_KEY, "sk");
Invocation invocation = new RpcInvocation();
AccessKeyAuthenticator helper = new AccessKeyAuthenticator(FrameworkModel.defaultModel());
assertThrows(RpcAuthenticationException.class, () -> helper.authenticate(invocation, url));
}
@Test
void testGetAccessKeyPairFailed() {
URL url = URL.valueOf("dubbo://10.10.10.10:2181").addParameter(Constants.ACCESS_KEY_ID_KEY, "ak");
AccessKeyAuthenticator helper = new AccessKeyAuthenticator(FrameworkModel.defaultModel());
Invocation invocation = mock(Invocation.class);
assertThrows(RuntimeException.class, () -> helper.getAccessKeyPair(invocation, url));
}
@Test
void testGetSignatureNoParameter() {
URL url = mock(URL.class);
Invocation invocation = mock(Invocation.class);
String secretKey = "123456";
AccessKeyAuthenticator helper = new AccessKeyAuthenticator(FrameworkModel.defaultModel());
String signature = helper.getSignature(url, invocation, secretKey, String.valueOf(System.currentTimeMillis()));
assertNotNull(signature);
}
}
| AccessKeyAuthenticatorTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JdkObsoleteTest.java | {
"start": 3419,
"end": 3760
} | class ____ extends java.util.Dictionary<Object, Object> {}
}
""")
.doTest();
}
@Test
public void refactoring() {
BugCheckerRefactoringTestHelper.newInstance(JdkObsolete.class, getClass())
.addInputLines(
"in/Test.java",
"""
import java.util.*;
| D |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/attribute/RequestProtocolAttribute.java | {
"start": 175,
"end": 1018
} | class ____ implements ExchangeAttribute {
public static final String REQUEST_PROTOCOL_SHORT = "%H";
public static final String REQUEST_PROTOCOL = "%{PROTOCOL}";
public static final ExchangeAttribute INSTANCE = new RequestProtocolAttribute();
private RequestProtocolAttribute() {
}
@Override
public String readAttribute(final RoutingContext exchange) {
return getHttpVersionStr(exchange.request().version());
}
@Override
public void writeAttribute(final RoutingContext exchange, final String newValue) throws ReadOnlyAttributeException {
throw new ReadOnlyAttributeException("Request getProtocol", newValue);
}
static String getHttpVersionStr(HttpVersion version) {
// best effort to try and infer the HTTP version from
// any "unknown" | RequestProtocolAttribute |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassParser.java | {
"start": 19286,
"end": 20563
} | class ____ via ASM for deterministic declaration order...
// Unfortunately, the JVM's standard reflection returns methods in arbitrary
// order, even between different runs of the same application on the same JVM.
try {
AnnotationMetadata asm =
this.metadataReaderFactory.getMetadataReader(original.getClassName()).getAnnotationMetadata();
Set<MethodMetadata> asmMethods = asm.getAnnotatedMethods(Bean.class.getName());
if (asmMethods.size() >= beanMethods.size()) {
Set<MethodMetadata> candidateMethods = new LinkedHashSet<>(beanMethods);
Set<MethodMetadata> selectedMethods = CollectionUtils.newLinkedHashSet(asmMethods.size());
for (MethodMetadata asmMethod : asmMethods) {
for (Iterator<MethodMetadata> it = candidateMethods.iterator(); it.hasNext();) {
MethodMetadata beanMethod = it.next();
if (beanMethod.getMethodName().equals(asmMethod.getMethodName())) {
selectedMethods.add(beanMethod);
it.remove();
break;
}
}
}
if (selectedMethods.size() == beanMethods.size()) {
// All reflection-detected methods found in ASM method set -> proceed
beanMethods = selectedMethods;
}
}
}
catch (IOException ex) {
logger.debug("Failed to read | file |
java | playframework__playframework | core/play-integration-test/src/test/java/play/it/http/ActionCompositionOrderTest.java | {
"start": 1970,
"end": 2414
} | class ____ extends Action<WithUsername> {
@Override
public CompletionStage<Result> call(Http.Request req) {
return delegate.call(req.addAttr(Security.USERNAME, configuration.value()));
}
}
@With({FirstAction.class, SecondAction.class}) // let's run two actions
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Repeatable(SomeRepeatable.List.class)
public static @ | WithUsernameAction |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt1Evaluator.java | {
"start": 1085,
"end": 3959
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt1Evaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator field;
private final int p0;
private final DriverContext driverContext;
private Warnings warnings;
public RoundToInt1Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0,
DriverContext driverContext) {
this.source = source;
this.field = field;
this.p0 = p0;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (IntBlock fieldBlock = (IntBlock) field.eval(page)) {
IntVector fieldVector = fieldBlock.asVector();
if (fieldVector == null) {
return eval(page.getPositionCount(), fieldBlock);
}
return eval(page.getPositionCount(), fieldVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += field.baseRamBytesUsed();
return baseRamBytesUsed;
}
public IntBlock eval(int positionCount, IntBlock fieldBlock) {
try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (fieldBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
int field = fieldBlock.getInt(fieldBlock.getFirstValueIndex(p));
result.appendInt(RoundToInt.process(field, this.p0));
}
return result.build();
}
}
public IntVector eval(int positionCount, IntVector fieldVector) {
try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
int field = fieldVector.getInt(p);
result.appendInt(p, RoundToInt.process(field, this.p0));
}
return result.build();
}
}
@Override
public String toString() {
return "RoundToInt1Evaluator[" + "field=" + field + ", p0=" + p0 + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(field);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | RoundToInt1Evaluator |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/sql/oracle/demo/Demo_getTable.java | {
"start": 350,
"end": 3202
} | class ____ extends TestCase {
public void test_for_demo() throws Exception {
String sql = " CREATE TABLE \"ZEUS\".\"ACCOUNTS_DEL_20091231\" \n" +
" ( \"ID\" NUMBER NOT NULL ENABLE, \n" +
" \"SITE\" VARCHAR2(96) NOT NULL ENABLE, \n" +
" \"GMT_CREATE\" DATE NOT NULL ENABLE, \n" +
" \"CREATOR\" VARCHAR2(96), \n" +
" \"GMT_MODIFIED\" DATE NOT NULL ENABLE, \n" +
" \"MODIFIER\" VARCHAR2(96), \n" +
" \"IS_DELETED\" CHAR(1), \n" +
" \"CONTRACT_SERIAL\" VARCHAR2(192), \n" +
" \"MEMBER_ID\" VARCHAR2(60), \n" +
" \"CUSTOMER_ID\" NUMBER, \n" +
" \"PRODUCT_ID\" VARCHAR2(48), \n" +
" \"PRODUCT_PIC_NUM\" NUMBER, \n" +
" \"DOMAIN_NAME\" VARCHAR2(96), \n" +
" \"EMAIL\" VARCHAR2(384), \n" +
" \"ALT_EMAIL\" VARCHAR2(384), \n" +
" \"AV_STATUS\" VARCHAR2(48), \n" +
" \"COMPANY_STATUS\" VARCHAR2(48), \n" +
" \"PRODUCT_STATUS\" VARCHAR2(48), \n" +
" \"COLUMN_STATUS\" VARCHAR2(48), \n" +
" \"COL_CONTENT_STATUS\" VARCHAR2(48), \n" +
" \"VOICE_RECORD_STATUS\" VARCHAR2(48), \n" +
" \"CASH_STATUS\" VARCHAR2(48), \n" +
" \"CONFIRM_PRODUCT_NUM\" NUMBER, \n" +
" \"DATUM_IMPORT\" CHAR(1), \n" +
" \"CUSTOMER_CHECK\" CHAR(1), \n" +
" \"AREA_ID_2\" NUMBER, \n" +
" \"OWNER_2\" VARCHAR2(96), \n" +
" \"DISTRIBUTE_DATE\" DATE, \n" +
" \"CUST_CHECK_DATE\" DATE, \n" +
" \"DATUM_IMPORT_DATE\" DATE, \n" +
" \"VALIDATE_DATE\" DATE, \n" +
" \"REMARK_1\" VARCHAR2(4000), \n" +
" \"REMARK_2\" VARCHAR2(768), \n" +
" \"REMARK_3\" VARCHAR2(768), \n" +
" \"PASSWORD\" VARCHAR2(96)\n" +
" ) SEGMENT CREATION IMMEDIATE \n" +
" PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255 NOCOMPRESS LOGGING\n" +
" STORAGE(INITIAL 131072 NEXT 131072 MINEXTENTS 1 MAXEXTENTS 2147483645\n" +
" PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)\n" +
" TABLESPACE \"ZEUSDATA\" ";
List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, JdbcConstants.ORACLE);
SQLCreateTableStatement stmt = (SQLCreateTableStatement) stmtList.get(0);
SQLName tableName = stmt.getName();
System.out.println(tableName.toString());
assertEquals("\"ZEUS\".\"ACCOUNTS_DEL_20091231\"", tableName.toString());
}
}
| Demo_getTable |
java | apache__camel | components/camel-smpp/src/test/java/org/apache/camel/component/smpp/SmppProducerLazySessionCreationTest.java | {
"start": 1417,
"end": 1496
} | class ____ <code>org.apache.camel.component.smpp.SmppProducer</code>
*/
public | for |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SealedTypesWithPolymorphicDeductionTest.java | {
"start": 8210,
"end": 8347
} | class ____ extends AmbiguousCat {
public boolean angry;
}
// No distinguishing properties whatsoever
static final | AmbiguousLiveCat |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/route/RedisRouteDefinitionRepository.java | {
"start": 1210,
"end": 3426
} | class ____ implements RouteDefinitionRepository {
private static final Logger log = LoggerFactory.getLogger(RedisRouteDefinitionRepository.class);
/**
* Key prefix for RouteDefinition queries to redis.
*/
private static final String ROUTEDEFINITION_REDIS_KEY_PREFIX_QUERY = "routedefinition_";
private ReactiveRedisTemplate<String, RouteDefinition> reactiveRedisTemplate;
private ReactiveValueOperations<String, RouteDefinition> routeDefinitionReactiveValueOperations;
public RedisRouteDefinitionRepository(ReactiveRedisTemplate<String, RouteDefinition> reactiveRedisTemplate) {
this.reactiveRedisTemplate = reactiveRedisTemplate;
this.routeDefinitionReactiveValueOperations = reactiveRedisTemplate.opsForValue();
}
@Override
public Flux<RouteDefinition> getRouteDefinitions() {
return reactiveRedisTemplate.scan(ScanOptions.scanOptions().match(createKey("*")).build())
.flatMap(key -> reactiveRedisTemplate.opsForValue().get(key))
.onErrorContinue((throwable, routeDefinition) -> {
if (log.isErrorEnabled()) {
log.error("get routes from redis error cause : {}", throwable.toString(), throwable);
}
});
}
@Override
public Mono<Void> save(Mono<RouteDefinition> route) {
return route.flatMap(routeDefinition -> {
Objects.requireNonNull(routeDefinition.getId(), "id may not be null");
return routeDefinitionReactiveValueOperations.set(createKey(routeDefinition.getId()), routeDefinition)
.flatMap(success -> {
if (success) {
return Mono.empty();
}
return Mono.defer(() -> Mono.error(new RuntimeException(
String.format("Could not add route to redis repository: %s", routeDefinition))));
});
});
}
@Override
public Mono<Void> delete(Mono<String> routeId) {
return routeId.flatMap(id -> routeDefinitionReactiveValueOperations.delete(createKey(id)).flatMap(success -> {
if (success) {
return Mono.empty();
}
return Mono.defer(() -> Mono.error(new NotFoundException(
String.format("Could not remove route from redis repository with id: %s", routeId))));
}));
}
private String createKey(String routeId) {
return ROUTEDEFINITION_REDIS_KEY_PREFIX_QUERY + routeId;
}
}
| RedisRouteDefinitionRepository |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/executor/loader/SerializableProxyTest.java | {
"start": 8524,
"end": 8928
} | class ____ extends Author {
public AuthorWithWriteReplaceMethod() {
}
AuthorWithWriteReplaceMethod(Integer id, String username, String password, String email, String bio,
Section section) {
super(id, username, password, email, bio, section);
}
Object writeReplace() throws ObjectStreamException {
return this;
}
}
public static | AuthorWithWriteReplaceMethod |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/NestedTestConfiguration.java | {
"start": 7393,
"end": 7484
} | enum ____ with the supplied
* name, ignoring case.
* @param name the name of the | constant |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/body/CloseableAvailableByteBody.java | {
"start": 914,
"end": 1301
} | interface ____ extends AvailableByteBody, CloseableByteBody {
/**
* {@inheritDoc}
*
* @deprecated This method is unnecessary for {@link AvailableByteBody}, it does nothing.
*/
@SuppressWarnings("deprecation")
@Override
@NonNull
@Deprecated
default CloseableAvailableByteBody allowDiscard() {
return this;
}
}
| CloseableAvailableByteBody |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaTruncatorImpl.java | {
"start": 1699,
"end": 8614
} | class ____ extends AbstractSchemaPopulator implements SchemaTruncator {
private static final Logger LOG = Logger.getLogger( SchemaTruncatorImpl.class );
private final HibernateSchemaManagementTool tool;
private final SchemaFilter schemaFilter;
public SchemaTruncatorImpl(HibernateSchemaManagementTool tool, SchemaFilter truncatorFilter) {
this.tool = tool;
schemaFilter = truncatorFilter;
}
@Override
public void doTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor) {
final var configuration = options.getConfigurationValues();
final var jdbcContext = tool.resolveJdbcContext(configuration);
final var targets =
tool.buildGenerationTargets( targetDescriptor, jdbcContext, configuration,
true ); //we need autocommit on for DB2 at least
doTruncate( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), targets );
}
@Internal
public void doTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
GenerationTarget... targets) {
for ( var target : targets ) {
target.prepare();
}
try {
performTruncate( metadata, options, contributableInclusionFilter, dialect, targets );
}
finally {
for ( var target : targets ) {
try {
target.release();
}
catch (Exception e) {
LOG.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() );
}
}
}
}
private void performTruncate(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
GenerationTarget... targets) {
final boolean format = interpretFormattingEnabled( options.getConfigurationValues() );
final var formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
truncateFromMetadata( metadata, options, schemaFilter, contributableInclusionFilter, dialect, formatter, targets );
}
private void truncateFromMetadata(
Metadata metadata,
ExecutionOptions options,
SchemaFilter schemaFilter,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
final var database = metadata.getDatabase();
final var context = createSqlStringGenerationContext( options, metadata );
final Set<String> exportIdentifiers = setOfSize( 50 );
for ( var namespace : database.getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
disableConstraints( namespace, metadata, formatter, options, schemaFilter, context,
contributableInclusionFilter, targets );
applySqlString( dialect.getTableCleaner().getSqlBeforeString(), formatter, options, targets );
// now it's safe to drop the tables
final List<Table> tablesToTruncate = new ArrayList<>( namespace.getTables().size() );
for ( var table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& schemaFilter.includeTable( table )
&& contributableInclusionFilter.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers );
tablesToTruncate.add( table );
}
}
applySqlStrings(
dialect.getTableCleaner().getSqlTruncateStrings( tablesToTruncate, metadata, context ),
formatter, options, targets
);
// reset sequences back to their initial values
for ( var table : tablesToTruncate ) {
for ( var command : table.getResetCommands( context ) ) {
applySqlStrings( command.initCommands(), formatter, options, targets );
}
}
applySqlString( dialect.getTableCleaner().getSqlAfterString(), formatter, options, targets );
enableConstraints( namespace, metadata, formatter, options, schemaFilter, context,
contributableInclusionFilter, targets );
}
}
final var commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
final boolean format = interpretFormattingEnabled( options.getConfigurationValues() );
applyImportSources( options, commandExtractor, format, dialect, targets );
}
private void disableConstraints(
Namespace namespace,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SchemaFilter schemaFilter,
SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final var dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
for ( var table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& schemaFilter.includeTable( table )
&& contributableInclusionFilter.matches( table ) ) {
for ( var foreignKey : table.getForeignKeyCollection() ) {
if ( dialect.canDisableConstraints() ) {
applySqlString(
dialect.getTableCleaner()
.getSqlDisableConstraintString( foreignKey, metadata, context ),
formatter,
options,
targets
);
}
else if ( !dialect.canBatchTruncate() ) {
applySqlStrings(
dialect.getForeignKeyExporter()
.getSqlDropStrings( foreignKey, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
}
private void enableConstraints(
Namespace namespace,
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
SchemaFilter schemaFilter,
SqlStringGenerationContext context,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final var dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
for ( var table : namespace.getTables() ) {
if ( table.isPhysicalTable()
&& schemaFilter.includeTable( table )
&& contributableInclusionFilter.matches( table ) ) {
for ( var foreignKey : table.getForeignKeyCollection() ) {
if ( dialect.canDisableConstraints() ) {
applySqlString(
dialect.getTableCleaner().getSqlEnableConstraintString( foreignKey, metadata, context ),
formatter,
options,
targets
);
}
else if ( !dialect.canBatchTruncate() ) {
applySqlStrings(
dialect.getForeignKeyExporter().getSqlCreateStrings( foreignKey, metadata, context ),
formatter,
options,
targets
);
}
}
}
}
}
private static void checkExportIdentifier(Exportable exportable, Set<String> exportIdentifiers) {
final String exportIdentifier = exportable.getExportIdentifier();
if ( exportIdentifiers.contains( exportIdentifier ) ) {
throw new SchemaManagementException( "SQL strings added more than once for: " + exportIdentifier );
}
exportIdentifiers.add( exportIdentifier );
}
@Override
ClassLoaderService getClassLoaderService() {
return tool.getServiceRegistry().getService( ClassLoaderService.class );
}
}
| SchemaTruncatorImpl |
java | apache__camel | components/camel-xj/src/main/java/org/apache/camel/component/xj/XJEndpoint.java | {
"start": 1702,
"end": 5242
} | class ____ extends XsltSaxonEndpoint {
private final JsonFactory jsonFactory = new JsonFactory();
@UriParam
@Metadata(required = true, description = "Transform direction. Either XML2JSON or JSON2XML")
private TransformDirection transformDirection;
public XJEndpoint(String endpointUri, Component component) {
super(endpointUri, component);
}
@Override
public boolean isRemote() {
return false;
}
@ManagedAttribute(description = "Transform direction")
public TransformDirection getTransformDirection() {
return transformDirection;
}
/**
* Sets the transform direction.
*/
public void setTransformDirection(TransformDirection transformDirection) {
this.transformDirection = transformDirection;
}
@Override
protected void doInit() throws Exception {
if ("identity".equalsIgnoreCase(getResourceUri())) {
// Using a stylesheet for "identity" transform is slow. but with a {@link TransformerFactory}
// we can't get an identity transformer. But for now we leave it that way.
setResourceUri("org/apache/camel/component/xj/identity.xsl");
}
super.doInit();
}
@Override
protected XsltSaxonBuilder createXsltBuilder() throws Exception {
final XsltSaxonBuilder xsltBuilder = super.createXsltBuilder();
xsltBuilder.setAllowStAX(true); // we rely on stax so always to true.
configureInput(xsltBuilder);
return xsltBuilder;
}
/**
* Configures the source input depending on the {@link XJEndpoint#transformDirection}
*/
protected void configureInput(XsltBuilder xsltBuilder) {
if (TransformDirection.JSON2XML == this.transformDirection) {
final JsonSourceHandlerFactoryImpl sourceHandlerFactory = new JsonSourceHandlerFactoryImpl(jsonFactory);
sourceHandlerFactory.setFailOnNullBody(isFailOnNullBody());
xsltBuilder.setSourceHandlerFactory(sourceHandlerFactory);
}
// in the other direction, XML2JSON, the default org.apache.camel.component.xslt.XmlSourceHandlerFactoryImpl will be used
}
/**
* Configures the result output depending on the {@link XJEndpoint#transformDirection}
*/
@Override
protected void configureOutput(XsltBuilder xsltBuilder, String output) throws Exception {
switch (this.transformDirection) {
case JSON2XML:
super.configureOutput(xsltBuilder, output);
break;
case XML2JSON:
configureJsonOutput(xsltBuilder, output);
break;
default:
throw new IllegalArgumentException("Unknown transformation direction: " + this.transformDirection);
}
}
/**
* Configures the result output when transforming to JSON
*/
protected void configureJsonOutput(XsltBuilder xsltBuilder, String output) {
if ("DOM".equals(output)) {
throw new UnsupportedOperationException("DOM output not supported when transforming to json");
} else if ("bytes".equals(output)) {
xsltBuilder.setResultHandlerFactory(new JsonStreamResultHandlerFactory(jsonFactory));
} else if ("file".equals(output)) {
xsltBuilder.setResultHandlerFactory(new JsonFileResultHandlerFactory(jsonFactory));
} else {
xsltBuilder.setResultHandlerFactory(new JsonStringResultHandlerFactory(jsonFactory));
}
}
}
| XJEndpoint |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/lazyload_proxyfactory_comparison/UserWithGetXxxWithoutInterface.java | {
"start": 728,
"end": 1174
} | class ____ {
private Integer id;
private String name;
private Group owner;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Group getOwner() {
return owner;
}
public void setOwner(Group owner) {
this.owner = owner;
}
}
| UserWithGetXxxWithoutInterface |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/JettyHttpContentTypeTest.java | {
"start": 1127,
"end": 3597
} | class ____ extends BaseJettyTest {
private static final String CHARSET = StandardCharsets.ISO_8859_1.name();
@Test
public void testContentType() throws Exception {
getMockEndpoint("mock:input").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:input").expectedHeaderReceived(Exchange.CONTENT_TYPE,
"text/plain; charset=" + CHARSET.toLowerCase());
getMockEndpoint("mock:input").expectedHeaderReceived(Exchange.HTTP_CHARACTER_ENCODING, CHARSET);
getMockEndpoint("mock:input").expectedHeaderReceived(Exchange.HTTP_URL, "http://127.0.0.1:" + getPort() + "/foo");
getMockEndpoint("mock:input").expectedPropertyReceived(Exchange.CHARSET_NAME, CHARSET);
byte[] data = "Hello World".getBytes(StandardCharsets.ISO_8859_1);
String out = template.requestBodyAndHeader("http://127.0.0.1:{{port}}/foo", data, "content-type",
"text/plain; charset=\"" + CHARSET + "\"", String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testContentTypeWithAction() throws Exception {
getMockEndpoint("mock:input").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:input").expectedHeaderReceived(Exchange.CONTENT_TYPE,
"text/plain; charset=" + CHARSET.toLowerCase() + "; action=\"http://somewhere.com/foo\"");
getMockEndpoint("mock:input").expectedHeaderReceived(Exchange.HTTP_CHARACTER_ENCODING, "ISO-8859-1");
getMockEndpoint("mock:input").expectedHeaderReceived(Exchange.HTTP_URL, "http://127.0.0.1:" + getPort() + "/foo");
getMockEndpoint("mock:input").expectedPropertyReceived(Exchange.CHARSET_NAME, "ISO-8859-1");
byte[] data = "Hello World".getBytes(StandardCharsets.ISO_8859_1);
String out = template.requestBodyAndHeader("http://127.0.0.1:{{port}}/foo", data, "content-type",
"text/plain;charset=\"" + CHARSET + "\";action=\"http://somewhere.com/foo\"", String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("jetty:http://127.0.0.1:{{port}}/foo").to("mock:input").transform().constant("Bye World");
}
};
}
}
| JettyHttpContentTypeTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaSink.java | {
"start": 1269,
"end": 3733
} | class ____ {
@Test
public void testShouldCreateDatagramSocketByDefault() throws Exception {
SubsetConfiguration conf = new ConfigBuilder().subset("test.sink.ganglia");
GangliaSink30 gangliaSink = new GangliaSink30();
gangliaSink.init(conf);
DatagramSocket socket = gangliaSink.getDatagramSocket();
assertFalse(socket == null || socket instanceof MulticastSocket,
"Did not create DatagramSocket");
}
@Test
public void testShouldCreateDatagramSocketIfMulticastIsDisabled() throws Exception {
SubsetConfiguration conf =
new ConfigBuilder().add("test.sink.ganglia.multicast", false).subset("test.sink.ganglia");
GangliaSink30 gangliaSink = new GangliaSink30();
gangliaSink.init(conf);
DatagramSocket socket = gangliaSink.getDatagramSocket();
assertFalse(socket == null || socket instanceof MulticastSocket,
"Did not create DatagramSocket");
}
@Test
public void testShouldCreateMulticastSocket() throws Exception {
SubsetConfiguration conf =
new ConfigBuilder().add("test.sink.ganglia.multicast", true).subset("test.sink.ganglia");
GangliaSink30 gangliaSink = new GangliaSink30();
gangliaSink.init(conf);
DatagramSocket socket = gangliaSink.getDatagramSocket();
assertTrue(socket != null && socket instanceof MulticastSocket,
"Did not create MulticastSocket");
int ttl = ((MulticastSocket) socket).getTimeToLive();
assertEquals(1, ttl, "Did not set default TTL");
}
@Test
public void testShouldSetMulticastSocketTtl() throws Exception {
SubsetConfiguration conf = new ConfigBuilder().add("test.sink.ganglia.multicast", true)
.add("test.sink.ganglia.multicast.ttl", 3).subset("test.sink.ganglia");
GangliaSink30 gangliaSink = new GangliaSink30();
gangliaSink.init(conf);
DatagramSocket socket = gangliaSink.getDatagramSocket();
assertTrue(socket != null && socket instanceof MulticastSocket,
"Did not create MulticastSocket");
int ttl = ((MulticastSocket) socket).getTimeToLive();
assertEquals(3, ttl, "Did not set TTL");
}
@Test
public void testMultipleMetricsServers() {
SubsetConfiguration conf =
new ConfigBuilder().add("test.sink.ganglia.servers", "server1,server2")
.subset("test.sink.ganglia");
GangliaSink30 gangliaSink = new GangliaSink30();
gangliaSink.init(conf);
assertEquals(2, gangliaSink.getMetricsServers().size());
}
}
| TestGangliaSink |
java | apache__flink | flink-test-utils-parent/flink-test-utils/src/test/java/org/apache/flink/networking/EchoServer.java | {
"start": 1150,
"end": 2595
} | class ____ extends Thread implements AutoCloseable {
private final ServerSocket serverSocket = new ServerSocket(0);
private final int socketTimeout;
private final List<EchoWorkerThread> workerThreads =
Collections.synchronizedList(new ArrayList<>());
private volatile boolean close = false;
private Exception threadException;
public EchoServer(int socketTimeout) throws IOException {
serverSocket.setSoTimeout(socketTimeout);
this.socketTimeout = socketTimeout;
}
public int getLocalPort() {
return serverSocket.getLocalPort();
}
@Override
public void run() {
while (!close) {
try {
// We are NOT using NetUtils.acceptWithoutTimeout here as this ServerSocket sets
// a timeout.
EchoWorkerThread thread =
new EchoWorkerThread(serverSocket.accept(), socketTimeout);
thread.start();
} catch (IOException e) {
threadException = e;
}
}
}
@Override
public void close() throws Exception {
for (EchoWorkerThread thread : workerThreads) {
thread.close();
thread.join();
}
close = true;
if (threadException != null) {
throw threadException;
}
serverSocket.close();
this.join();
}
private static | EchoServer |
java | apache__hadoop | hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java | {
"start": 1388,
"end": 2328
} | class ____ implements Filter {
private static Logger LOG = LoggerFactory.getLogger(RequestLoggerFilter.class);
@Override
public void init(FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
if (!LOG.isDebugEnabled()) {
filterChain.doFilter(request, response);
}
else {
XHttpServletRequest xRequest = new XHttpServletRequest((HttpServletRequest) request);
XHttpServletResponse xResponse = new XHttpServletResponse((HttpServletResponse) response);
try {
LOG.debug(xRequest.getResquestInfo().toString());
filterChain.doFilter(xRequest, xResponse);
}
finally {
LOG.debug(xResponse.getResponseInfo().toString());
}
}
}
@Override
public void destroy() {
}
private static | RequestLoggerFilter |
java | apache__dubbo | dubbo-plugin/dubbo-qos/src/main/java/org/apache/dubbo/qos/command/impl/Ready.java | {
"start": 1483,
"end": 2944
} | class ____ implements BaseCommand {
private final FrameworkModel frameworkModel;
public Ready(FrameworkModel frameworkModel) {
this.frameworkModel = frameworkModel;
}
@Override
public String execute(CommandContext commandContext, String[] args) {
String config = frameworkModel.getApplicationModels().stream()
.map(applicationModel ->
applicationModel.getApplicationConfigManager().getApplication())
.map(o -> o.orElse(null))
.filter(Objects::nonNull)
.map(ApplicationConfig::getReadinessProbe)
.filter(Objects::nonNull)
.collect(Collectors.joining(","));
URL url = URL.valueOf("application://").addParameter(CommonConstants.QOS_READY_PROBE_EXTENSION, config);
List<ReadinessProbe> readinessProbes = frameworkModel
.getExtensionLoader(ReadinessProbe.class)
.getActivateExtension(url, CommonConstants.QOS_READY_PROBE_EXTENSION);
if (!readinessProbes.isEmpty()) {
for (ReadinessProbe readinessProbe : readinessProbes) {
if (!readinessProbe.check()) {
// 503 Service Unavailable
commandContext.setHttpCode(503);
return "false";
}
}
}
// 200 OK
commandContext.setHttpCode(200);
return "true";
}
}
| Ready |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builder/multiple/ErroneousMoreThanOneBuildMethodWithMapperDefinedMappingMapper.java | {
"start": 416,
"end": 525
} | interface ____ {
Process map(Source source);
}
| ErroneousMoreThanOneBuildMethodWithMapperDefinedMappingMapper |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/feature/InitStringFieldAsEmptyTest.java | {
"start": 886,
"end": 985
} | class ____ {
public Model() {
}
public String value;
}
}
| Model |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/EnumResolver.java | {
"start": 1224,
"end": 2437
} | enum ____ to match are from {@code @JsonValue}-annotated
* method.
*
* @since 2.20
*/
protected final boolean _hasAsValueAnnotation;
/*
/**********************************************************************
/* Constructors
/**********************************************************************
*/
protected EnumResolver(Class<Enum<?>> enumClass, Enum<?>[] enums,
HashMap<String, Enum<?>> enumsById, Enum<?> defaultValue,
boolean isIgnoreCase, boolean isFromIntValue,
boolean hasAsValueAnnotation)
{
_enumClass = enumClass;
_enums = enums;
_enumsById = enumsById;
_defaultValue = defaultValue;
_isIgnoreCase = isIgnoreCase;
_isFromIntValue = isFromIntValue;
_hasAsValueAnnotation = hasAsValueAnnotation;
}
/*
/**********************************************************************
/* Factory methods
/**********************************************************************
*/
/**
* Factory method for constructing an {@link EnumResolver} based on the given {@link DeserializationConfig} and
* {@link AnnotatedClass} of the | values |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/contextual/ContextualDeserializationTest.java | {
"start": 2446,
"end": 3318
} | class ____
extends ValueDeserializer<StringValue>
{
protected final String _fieldName;
public MyContextualDeserializer() { this(""); }
public MyContextualDeserializer(String fieldName) {
_fieldName = fieldName;
}
@Override
public StringValue deserialize(JsonParser jp, DeserializationContext ctxt)
{
return new StringValue(""+_fieldName+"="+jp.getString());
}
@Override
public ValueDeserializer<?> createContextual(DeserializationContext ctxt,
BeanProperty property)
{
String name = (property == null) ? "NULL" : property.getName();
return new MyContextualDeserializer(name);
}
}
/**
* Alternative that uses annotation for choosing name to use
*/
static | MyContextualDeserializer |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/bindinggraphvalidation/PackageNameCompressor.java | {
"start": 1307,
"end": 2132
} | class ____ {
static final String LEGEND_HEADER =
"\n\n======================\nFull classname legend:\n======================\n";
static final String LEGEND_FOOTER =
"========================\nEnd of classname legend:\n========================\n";
private static final ImmutableSet<String> PACKAGES_SKIPPED_IN_LEGEND = ImmutableSet.of(
"java.lang.",
"java.util.");
private static final Splitter PACKAGE_SPLITTER = Splitter.on('.');
private static final Joiner PACKAGE_JOINER = Joiner.on('.');
// TODO(erichang): Consider validating this regex by also passing in all of the known types from
// keys, module names, component names, etc and checking against that list. This may have some
// extra complications with taking apart types like List<Foo> to get the inner | PackageNameCompressor |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/processor/NotificationProcessor.java | {
"start": 2261,
"end": 11465
} | class ____ implements NettyRequestProcessor {
private static final Logger POP_LOGGER = LoggerFactory.getLogger(LoggerName.ROCKETMQ_POP_LOGGER_NAME);
private final BrokerController brokerController;
private final Random random = new Random(System.currentTimeMillis());
private final PopLongPollingService popLongPollingService;
private static final String BORN_TIME = "bornTime";
public NotificationProcessor(final BrokerController brokerController) {
this.brokerController = brokerController;
this.popLongPollingService = new PopLongPollingService(brokerController, this, true);
}
public void shutdown() throws Exception {
this.popLongPollingService.shutdown();
}
@Override
public boolean rejectRequest() {
return false;
}
// When a new message is written to CommitLog, this method would be called.
// Suspended long polling will receive notification and be wakeup.
public void notifyMessageArriving(final String topic, final int queueId, long offset,
Long tagsCode, long msgStoreTime, byte[] filterBitMap, Map<String, String> properties) {
this.popLongPollingService.notifyMessageArrivingWithRetryTopic(
topic, queueId, offset, tagsCode, msgStoreTime, filterBitMap, properties);
}
public void notifyMessageArriving(final String topic, final int queueId) {
this.popLongPollingService.notifyMessageArrivingWithRetryTopic(topic, queueId);
}
@Override
public RemotingCommand processRequest(final ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
Channel channel = ctx.channel();
RemotingCommand response = RemotingCommand.createResponseCommand(NotificationResponseHeader.class);
final NotificationResponseHeader responseHeader = (NotificationResponseHeader) response.readCustomHeader();
final NotificationRequestHeader requestHeader =
request.decodeCommandCustomHeader(NotificationRequestHeader.class, true);
if (requestHeader.getBornTime() == 0) {
final long beginTimeMills = this.brokerController.getMessageStore().now();
request.addExtField(BORN_TIME, String.valueOf(beginTimeMills));
requestHeader.setBornTime(beginTimeMills);
}
response.setOpaque(request.getOpaque());
if (!PermName.isReadable(this.brokerController.getBrokerConfig().getBrokerPermission())) {
response.setCode(ResponseCode.NO_PERMISSION);
response.setRemark(String.format("the broker[%s] peeking message is forbidden", this.brokerController.getBrokerConfig().getBrokerIP1()));
return response;
}
TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(requestHeader.getTopic());
if (null == topicConfig) {
POP_LOGGER.error("The topic {} not exist, consumer: {} ", requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(channel));
response.setCode(ResponseCode.TOPIC_NOT_EXIST);
response.setRemark(String.format("topic[%s] not exist, apply first please! %s", requestHeader.getTopic(), FAQUrl.suggestTodo(FAQUrl.APPLY_TOPIC_URL)));
return response;
}
if (!PermName.isReadable(topicConfig.getPerm())) {
response.setCode(ResponseCode.NO_PERMISSION);
response.setRemark("the topic[" + requestHeader.getTopic() + "] peeking message is forbidden");
return response;
}
if (requestHeader.getQueueId() >= topicConfig.getReadQueueNums()) {
String errorInfo = String.format("queueId[%d] is illegal, topic:[%s] topicConfig.readQueueNums:[%d] consumer:[%s]",
requestHeader.getQueueId(), requestHeader.getTopic(), topicConfig.getReadQueueNums(), channel.remoteAddress());
POP_LOGGER.warn(errorInfo);
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark(errorInfo);
return response;
}
SubscriptionGroupConfig subscriptionGroupConfig = this.brokerController.getSubscriptionGroupManager().findSubscriptionGroupConfig(requestHeader.getConsumerGroup());
if (null == subscriptionGroupConfig) {
response.setCode(ResponseCode.SUBSCRIPTION_GROUP_NOT_EXIST);
response.setRemark(String.format("subscription group [%s] does not exist, %s", requestHeader.getConsumerGroup(), FAQUrl.suggestTodo(FAQUrl.SUBSCRIPTION_GROUP_NOT_EXIST)));
return response;
}
if (!subscriptionGroupConfig.isConsumeEnable()) {
response.setCode(ResponseCode.NO_PERMISSION);
response.setRemark("subscription group no permission, " + requestHeader.getConsumerGroup());
return response;
}
int randomQ = random.nextInt(100);
boolean hasMsg = false;
BrokerConfig brokerConfig = brokerController.getBrokerConfig();
if (requestHeader.getQueueId() < 0) {
// read all queue
hasMsg = hasMsgFromTopic(topicConfig, randomQ, requestHeader);
} else {
int queueId = requestHeader.getQueueId();
hasMsg = hasMsgFromQueue(topicConfig.getTopicName(), requestHeader, queueId);
}
// if it doesn't have message, fetch retry
if (!hasMsg) {
String retryTopic = KeyBuilder.buildPopRetryTopic(requestHeader.getTopic(), requestHeader.getConsumerGroup(), brokerConfig.isEnableRetryTopicV2());
hasMsg = hasMsgFromTopic(retryTopic, randomQ, requestHeader);
if (!hasMsg && brokerConfig.isEnableRetryTopicV2() && brokerConfig.isRetrieveMessageFromPopRetryTopicV1()) {
String retryTopicConfigV1 = KeyBuilder.buildPopRetryTopicV1(requestHeader.getTopic(), requestHeader.getConsumerGroup());
hasMsg = hasMsgFromTopic(retryTopicConfigV1, randomQ, requestHeader);
}
}
if (!hasMsg) {
PollingResult pollingResult = popLongPollingService.polling(ctx, request, new PollingHeader(requestHeader));
if (pollingResult == PollingResult.POLLING_SUC) {
return null;
} else if (pollingResult == PollingResult.POLLING_FULL) {
responseHeader.setPollingFull(true);
}
}
response.setCode(ResponseCode.SUCCESS);
responseHeader.setHasMsg(hasMsg);
return response;
}
private boolean hasMsgFromTopic(String topicName, int randomQ, NotificationRequestHeader requestHeader)
throws RemotingCommandException {
TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topicName);
return hasMsgFromTopic(topicConfig, randomQ, requestHeader);
}
private boolean hasMsgFromTopic(TopicConfig topicConfig, int randomQ, NotificationRequestHeader requestHeader)
throws RemotingCommandException {
boolean hasMsg;
if (topicConfig != null) {
for (int i = 0; i < topicConfig.getReadQueueNums(); i++) {
int queueId = (randomQ + i) % topicConfig.getReadQueueNums();
hasMsg = hasMsgFromQueue(topicConfig.getTopicName(), requestHeader, queueId);
if (hasMsg) {
return true;
}
}
}
return false;
}
private boolean hasMsgFromQueue(String targetTopic, NotificationRequestHeader requestHeader, int queueId) throws RemotingCommandException {
if (Boolean.TRUE.equals(requestHeader.getOrder())) {
if (this.brokerController.getConsumerOrderInfoManager().checkBlock(requestHeader.getAttemptId(), requestHeader.getTopic(), requestHeader.getConsumerGroup(), queueId, 0)) {
return false;
}
}
long offset = getPopOffset(targetTopic, requestHeader.getConsumerGroup(), queueId);
try {
long restNum = this.brokerController.getMessageStore().getMaxOffsetInQueue(targetTopic, queueId) - offset;
return restNum > 0;
} catch (ConsumeQueueException e) {
throw new RemotingCommandException("Failed tp get max offset in queue", e);
}
}
private long getPopOffset(String topic, String cid, int queueId) {
long offset = this.brokerController.getConsumerOffsetManager().queryOffset(cid, topic, queueId);
if (offset < 0) {
offset = this.brokerController.getMessageStore().getMinOffsetInQueue(topic, queueId);
}
long bufferOffset;
if (brokerController.getBrokerConfig().isPopConsumerKVServiceEnable()) {
bufferOffset = this.brokerController.getConsumerOffsetManager().queryPullOffset(cid, topic, queueId);
} else {
bufferOffset = this.brokerController.getPopMessageProcessor()
.getPopBufferMergeService().getLatestOffset(topic, cid, queueId);
}
return bufferOffset < 0L ? offset : Math.max(bufferOffset, offset);
}
public PopLongPollingService getPopLongPollingService() {
return popLongPollingService;
}
}
| NotificationProcessor |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableDoFinally.java | {
"start": 1873,
"end": 4494
class ____<T> extends BasicIntQueueSubscription<T> implements FlowableSubscriber<T> {

    private static final long serialVersionUID = 4109457741734051389L;

    final Subscriber<? super T> downstream;

    // Side-effect to run exactly once after termination or cancellation.
    final Action onFinally;

    Subscription upstream;

    // Non-null when the upstream supports operator fusion.
    QueueSubscription<T> qs;

    // True when SYNC fusion was negotiated; in that mode poll() observes the end
    // of the stream (null result) and must run the finally action itself.
    boolean syncFused;

    DoFinallySubscriber(Subscriber<? super T> actual, Action onFinally) {
        this.downstream = actual;
        this.onFinally = onFinally;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void onSubscribe(Subscription s) {
        if (SubscriptionHelper.validate(this.upstream, s)) {
            this.upstream = s;
            // Remember the fuseable upstream so requestFusion/poll can delegate to it.
            if (s instanceof QueueSubscription) {
                this.qs = (QueueSubscription<T>)s;
            }

            downstream.onSubscribe(this);
        }
    }

    @Override
    public void onNext(T t) {
        downstream.onNext(t);
    }

    @Override
    public void onError(Throwable t) {
        // Forward the terminal event first, then run the finally action.
        downstream.onError(t);
        runFinally();
    }

    @Override
    public void onComplete() {
        downstream.onComplete();
        runFinally();
    }

    @Override
    public void cancel() {
        upstream.cancel();
        runFinally();
    }

    @Override
    public void request(long n) {
        upstream.request(n);
    }

    @Override
    public int requestFusion(int mode) {
        QueueSubscription<T> qs = this.qs;
        // Only fuse when the upstream is fuseable and the requester did not mark
        // an async boundary (BOUNDARY would move the action to another thread).
        if (qs != null && (mode & BOUNDARY) == 0) {
            int m = qs.requestFusion(mode);
            if (m != NONE) {
                syncFused = m == SYNC;
            }
            return m;
        }
        return NONE;
    }

    @Override
    public void clear() {
        qs.clear();
    }

    @Override
    public boolean isEmpty() {
        return qs.isEmpty();
    }

    @Nullable
    @Override
    public T poll() throws Throwable {
        T v = qs.poll();
        // In SYNC fusion a null poll means the stream has ended; run the action here.
        if (v == null && syncFused) {
            runFinally();
        }
        return v;
    }

    void runFinally() {
        // The CAS on the inherited atomic state guarantees onFinally runs at most
        // once even when termination and cancellation race.
        if (compareAndSet(0, 1)) {
            try {
                onFinally.run();
            } catch (Throwable ex) {
                // The chain already terminated; route the failure to the global handler.
                Exceptions.throwIfFatal(ex);
                RxJavaPlugins.onError(ex);
            }
        }
    }
}
static final | DoFinallySubscriber |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeInference.java | {
"start": 1890,
"end": 7034
} | class ____ {
/** Format for both arguments and state entries. */
static final Predicate<String> PARAMETER_NAME_FORMAT =
Pattern.compile("^[a-zA-Z_$][a-zA-Z_$0-9]*$").asPredicate();
private final @Nullable List<StaticArgument> staticArguments;
private final InputTypeStrategy inputTypeStrategy;
private final LinkedHashMap<String, StateTypeStrategy> stateTypeStrategies;
private final TypeStrategy outputTypeStrategy;
private final boolean disableSystemArguments;
    private TypeInference(
            @Nullable List<StaticArgument> staticArguments,
            InputTypeStrategy inputTypeStrategy,
            LinkedHashMap<String, StateTypeStrategy> stateTypeStrategies,
            TypeStrategy outputTypeStrategy,
            boolean disableSystemArguments) {
        // Instances are created through the Builder only; fields stay immutable afterwards.
        this.staticArguments = staticArguments;
        this.inputTypeStrategy = inputTypeStrategy;
        this.stateTypeStrategies = stateTypeStrategies;
        this.outputTypeStrategy = outputTypeStrategy;
        this.disableSystemArguments = disableSystemArguments;
        // Fail fast on state entries whose names are not valid identifiers.
        checkStateEntries();
    }
/** Builder for configuring and creating instances of {@link TypeInference}. */
public static TypeInference.Builder newBuilder() {
return new TypeInference.Builder();
}
public Optional<List<StaticArgument>> getStaticArguments() {
return Optional.ofNullable(staticArguments);
}
public InputTypeStrategy getInputTypeStrategy() {
return inputTypeStrategy;
}
public LinkedHashMap<String, StateTypeStrategy> getStateTypeStrategies() {
return stateTypeStrategies;
}
public TypeStrategy getOutputTypeStrategy() {
return outputTypeStrategy;
}
public boolean disableSystemArguments() {
return disableSystemArguments;
}
/**
* @deprecated Use {@link #getStaticArguments()} instead.
*/
@Deprecated
public Optional<List<String>> getNamedArguments() {
return Optional.ofNullable(staticArguments)
.map(
args ->
args.stream()
.map(StaticArgument::getName)
.collect(Collectors.toList()));
}
/**
* @deprecated Use {@link #getStaticArguments()} instead.
*/
@Deprecated
    public Optional<List<DataType>> getTypedArguments() {
        // Legacy accessor: assumes every static argument carries an explicit data type
        // (i.e. is a typed scalar argument) and fails otherwise.
        return Optional.ofNullable(staticArguments)
                .map(
                        args ->
                                args.stream()
                                        .map(
                                                arg ->
                                                        arg.getDataType()
                                                                .orElseThrow(
                                                                        () ->
                                                                                new IllegalArgumentException(
                                                                                        "Scalar argument with a data type expected.")))
                                        .collect(Collectors.toList()));
    }
/**
* @deprecated Use {@link #getStaticArguments()} instead.
*/
@Deprecated
public Optional<List<Boolean>> getOptionalArguments() {
return Optional.ofNullable(staticArguments)
.map(
args ->
args.stream()
.map(StaticArgument::isOptional)
.collect(Collectors.toList()));
}
/**
* @deprecated Use {@link #getStateTypeStrategies()} instead.
*/
@Deprecated
    public Optional<TypeStrategy> getAccumulatorTypeStrategy() {
        // Legacy aggregate-function view: maps the (at most one) state entry to the
        // accumulator type strategy; multiple entries cannot be represented here.
        if (stateTypeStrategies.isEmpty()) {
            return Optional.empty();
        }
        if (stateTypeStrategies.size() != 1) {
            throw new IllegalArgumentException(
                    "An accumulator should contain exactly one state type strategy.");
        }
        return Optional.of(stateTypeStrategies.values().iterator().next());
    }
    private void checkStateEntries() {
        // Verify state
        // All state entry names must match PARAMETER_NAME_FORMAT; collect every
        // offender so the error message lists them all at once.
        final List<String> invalidStateEntries =
                stateTypeStrategies.keySet().stream()
                        .filter(n -> !PARAMETER_NAME_FORMAT.test(n))
                        .collect(Collectors.toList());
        if (!invalidStateEntries.isEmpty()) {
            throw new ValidationException(
                    "Invalid state names. A state entry must follow the pattern [a-zA-Z_$][a-zA-Z_$0-9]*. But found: "
                            + invalidStateEntries);
        }
    }
// --------------------------------------------------------------------------------------------
// Builder
// --------------------------------------------------------------------------------------------
/** Builder for configuring and creating instances of {@link TypeInference}. */
@PublicEvolving
public static | TypeInference |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RxReturnValueIgnoredTest.java | {
"start": 4782,
"end": 4954
class ____ extends IgnoringParent<Flowable<Integer>> {
  // Test fixture: concretizes the parent's generic return type to Flowable<Integer>.
  // Returning null is acceptable here — NOTE(review): presumably only the
  // compile-time diagnostics of the checker matter in these tests; confirm
  // against the test cases that reference this class.
  @Override
  Flowable<Integer> ignoringFunction() {
    return null;
  }
}
private | NonIgnoringFlowableChild |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestIncrementalBlockReports.java | {
"start": 2807,
"end": 20550
} | class ____ {
public static final Logger LOG =
LoggerFactory.getLogger(TestIncrementalBlockReports.class);
private static final short DN_COUNT = 1;
private static final long DUMMY_BLOCK_ID = 5678;
private static final long DUMMY_BLOCK_LENGTH = 1024 * 1024;
private static final long DUMMY_BLOCK_GENSTAMP = 1000;
private static final String TEST_FILE_DATA = "hello world";
private static final String TEST_FILE = "/TestStandbyBlockManagement";
private static final Path TEST_FILE_PATH = new Path(TEST_FILE);
private MiniDFSCluster cluster = null;
private Configuration conf;
private NameNode singletonNn;
private DataNode singletonDn;
private BPOfferService bpos; // BPOS to use for block injection.
private BPServiceActor actor; // BPSA to use for block injection.
private String storageUuid; // DatanodeStorage to use for block injection.
  /**
   * Spins up a single-DN mini cluster and caches the NN/DN/BPOS/actor handles
   * plus a storage id used by the block-injection helpers.
   */
  @BeforeEach
  public void startCluster() throws IOException {
    conf = new HdfsConfiguration();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(DN_COUNT).build();
    singletonNn = cluster.getNameNode();
    singletonDn = cluster.getDataNodes().get(0);
    bpos = singletonDn.getAllBpOs().get(0);
    actor = bpos.getBPServiceActors().get(0);
    try (FsDatasetSpi.FsVolumeReferences volumes =
        singletonDn.getFSDataset().getFsVolumeReferences()) {
      // Any volume works; the first one's storage id is enough for injection.
      storageUuid = volumes.get(0).getStorageID();
    }
  }
  /** Returns a fabricated block (never written to the cluster) used for fake IBR entries. */
  private static Block getDummyBlock() {
    return new Block(DUMMY_BLOCK_ID, DUMMY_BLOCK_LENGTH, DUMMY_BLOCK_GENSTAMP);
  }
  /**
   * Inject a fake 'received' block into the BPServiceActor state.
   */
  private void injectBlockReceived() {
    ReceivedDeletedBlockInfo rdbi = new ReceivedDeletedBlockInfo(
        getDummyBlock(), BlockStatus.RECEIVED_BLOCK, null);
    DatanodeStorage s = singletonDn.getFSDataset().getStorage(storageUuid);
    // NOTE(review): the trailing 'false' flag meaning is inferred from position —
    // confirm against IncrementalBlockReportManager#notifyNamenodeBlock.
    actor.getIbrManager().notifyNamenodeBlock(rdbi, s, false);
  }
  /**
   * Inject a fake 'deleted' block into the BPServiceActor state.
   */
  private void injectBlockDeleted() {
    ReceivedDeletedBlockInfo rdbi = new ReceivedDeletedBlockInfo(
        getDummyBlock(), BlockStatus.DELETED_BLOCK, null);
    // Deleted blocks are queued and only reported when the IBR timer elapses
    // (see testReportBlockDeleted below).
    actor.getIbrManager().addRDBI(rdbi,
        singletonDn.getFSDataset().getStorage(storageUuid));
  }
  /**
   * Spy on calls from the DN to the NN.
   * Wraps the DN-to-NN protocol translator so tests can verify IBR RPCs.
   * @return spy object that can be used for Mockito verification.
   */
  DatanodeProtocolClientSideTranslatorPB spyOnDnCallsToNn() {
    return InternalDataNodeTestUtils.spyOnBposToNN(singletonDn, singletonNn);
  }
  /**
   * Ensure that an IBR is generated immediately for a block received by
   * the DN.
   *
   * @throws InterruptedException
   * @throws IOException
   */
  @Test
  @Timeout(value = 60)
  public void testReportBlockReceived() throws InterruptedException, IOException {
    try {
      DatanodeProtocolClientSideTranslatorPB nnSpy = spyOnDnCallsToNn();
      injectBlockReceived();

      // Sleep for a very short time, this is necessary since the IBR is
      // generated asynchronously.
      Thread.sleep(2000);

      // Ensure that the received block was reported immediately.
      Mockito.verify(nnSpy, times(1)).blockReceivedAndDeleted(
          any(DatanodeRegistration.class),
          anyString(),
          any(StorageReceivedDeletedBlocks[].class));
    } finally {
      // Null out so the shared teardown does not double-shutdown.
      cluster.shutdown();
      cluster = null;
    }
  }
  /**
   * Ensure that a delayed IBR is generated for a block deleted on the DN.
   *
   * @throws InterruptedException
   * @throws IOException
   */
  @Test
  @Timeout(value = 60)
  public void testReportBlockDeleted() throws InterruptedException, IOException {
    try {
      // Trigger a block report to reset the IBR timer.
      DataNodeTestUtils.triggerBlockReport(singletonDn);

      // Spy on calls from the DN to the NN
      DatanodeProtocolClientSideTranslatorPB nnSpy = spyOnDnCallsToNn();
      injectBlockDeleted();

      // Sleep for a very short time since IBR is generated
      // asynchronously.
      Thread.sleep(2000);

      // Ensure that no block report was generated immediately.
      // Deleted blocks are reported when the IBR timer elapses.
      Mockito.verify(nnSpy, times(0)).blockReceivedAndDeleted(
          any(DatanodeRegistration.class),
          anyString(),
          any(StorageReceivedDeletedBlocks[].class));

      // Trigger a heartbeat, this also triggers an IBR.
      DataNodeTestUtils.triggerHeartbeat(singletonDn);
      Thread.sleep(2000);

      // Ensure that the deleted block is reported.
      Mockito.verify(nnSpy, times(1)).blockReceivedAndDeleted(
          any(DatanodeRegistration.class),
          anyString(),
          any(StorageReceivedDeletedBlocks[].class));
    } finally {
      cluster.shutdown();
      cluster = null;
    }
  }
  /**
   * Add a received block entry and then replace it. Ensure that a single
   * IBR is generated and that pending receive request state is cleared.
   * This test case verifies the failure in HDFS-5922.
   *
   * @throws InterruptedException
   * @throws IOException
   */
  @Test
  @Timeout(value = 60)
  public void testReplaceReceivedBlock() throws InterruptedException, IOException {
    try {
      // Spy on calls from the DN to the NN
      DatanodeProtocolClientSideTranslatorPB nnSpy = spyOnDnCallsToNn();
      injectBlockReceived();
      injectBlockReceived();    // Overwrite the existing entry.

      // Sleep for a very short time since IBR is generated
      // asynchronously.
      Thread.sleep(2000);

      // Ensure that the received block is reported.
      Mockito.verify(nnSpy, atLeastOnce()).blockReceivedAndDeleted(
          any(DatanodeRegistration.class),
          anyString(),
          any(StorageReceivedDeletedBlocks[].class));

      // Ensure that no more IBRs are pending.
      assertFalse(actor.getIbrManager().sendImmediately());
    } finally {
      cluster.shutdown();
      cluster = null;
    }
  }
  /**
   * Exercises the race between standby edit-log tailing and incremental block
   * reports: performs writes/appends while intercepting DN IBRs to the standby,
   * then verifies a failover leaves no replicas marked corrupt.
   */
  @Test
  public void testIBRRaceCondition() throws Exception {
    cluster.shutdown();
    conf = new Configuration();
    HAUtil.setAllowStandbyReads(conf, true);
    conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
    cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleHATopology())
        .numDataNodes(3)
        .build();
    try {
      cluster.waitActive();
      cluster.transitionToActive(0);
      NameNode nn1 = cluster.getNameNode(0);
      NameNode nn2 = cluster.getNameNode(1);
      BlockManager bm2 = nn2.getNamesystem().getBlockManager();
      FileSystem fs = HATestUtil.configureFailoverFs(cluster, conf);
      // NOTE(review): unlike testIBRRaceCondition2, the doAnswer below never adds
      // the invocation to ibrsToStandby, so the replay loops further down iterate
      // an empty list and the intercepted IBRs are dropped entirely — confirm
      // this is intentional.
      List<InvocationOnMock> ibrsToStandby = new ArrayList<>();
      List<DatanodeProtocolClientSideTranslatorPB> spies = new ArrayList<>();
      Phaser ibrPhaser = new Phaser(1);
      for (DataNode dn : cluster.getDataNodes()) {
        DatanodeProtocolClientSideTranslatorPB nnSpy =
            InternalDataNodeTestUtils.spyOnBposToNN(dn, nn2);
        doAnswer((inv) -> {
          // Swallow the IBR and signal the phaser once per received block.
          for (StorageReceivedDeletedBlocks srdb :
              inv.getArgument(2, StorageReceivedDeletedBlocks[].class)) {
            for (ReceivedDeletedBlockInfo block : srdb.getBlocks()) {
              if (block.getStatus().equals(BlockStatus.RECEIVED_BLOCK)) {
                ibrPhaser.arriveAndDeregister();
              }
            }
          }
          return null;
        }).when(nnSpy).blockReceivedAndDeleted(
            any(DatanodeRegistration.class),
            anyString(),
            any(StorageReceivedDeletedBlocks[].class));
        spies.add(nnSpy);
      }
      LOG.info("==================================");
      // Force the DNs to delay report to the SNN
      // 9 parties = 3 DNs x 3 operations (1 write + 2 appends) below.
      ibrPhaser.bulkRegister(9);
      DFSTestUtil.writeFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
      // SNN has caught up to the latest edit log so we send the IBRs to SNN
      int phase = ibrPhaser.arrive();
      ibrPhaser.awaitAdvanceInterruptibly(phase, 60, TimeUnit.SECONDS);
      for (InvocationOnMock sendIBRs : ibrsToStandby) {
        try {
          sendIBRs.callRealMethod();
        } catch (Throwable t) {
          LOG.error("Exception thrown while calling sendIBRs: ", t);
        }
      }
      GenericTestUtils.waitFor(() -> bm2.getPendingDataNodeMessageCount() == 0,
          1000, 30000,
          "There should be 0 pending DN messages");
      ibrsToStandby.clear();
      // We need to trigger another edit log roll so that the pendingDNMessages
      // are processed.
      // 6 parties = 3 DNs x 2 appends below.
      ibrPhaser.bulkRegister(6);
      DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      phase = ibrPhaser.arrive();
      ibrPhaser.awaitAdvanceInterruptibly(phase, 60, TimeUnit.SECONDS);
      for (InvocationOnMock sendIBRs : ibrsToStandby) {
        try {
          sendIBRs.callRealMethod();
        } catch (Throwable t) {
          LOG.error("Exception thrown while calling sendIBRs: ", t);
        }
      }
      ibrsToStandby.clear();
      ibrPhaser.arriveAndDeregister();
      GenericTestUtils.waitFor(() -> bm2.getPendingDataNodeMessageCount() == 0,
          1000, 30000,
          "There should be 0 pending DN messages");
      ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, TEST_FILE_PATH);
      HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
      LOG.info("==================================");
      // Trigger an active switch to force SNN to mark blocks as corrupt if they
      // have a bad genstamp in the pendingDNMessages queue.
      cluster.transitionToStandby(0);
      cluster.transitionToActive(1);
      cluster.waitActive(1);
      assertEquals(0,
          nn2.getNamesystem().getBlockManager().numCorruptReplicas(block.getLocalBlock()),
          "There should not be any corrupt replicas");
    } finally {
      cluster.shutdown();
    }
  }
  /**
   * Variant of {@code testIBRRaceCondition} that captures the intercepted IBRs
   * and replays them after the standby has caught up with the edit log, then
   * verifies a failover leaves no replicas marked corrupt.
   */
  @Test
  public void testIBRRaceCondition2() throws Exception {
    cluster.shutdown();
    Configuration conf = new Configuration();
    HAUtil.setAllowStandbyReads(conf, true);
    conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
    cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleHATopology())
        .numDataNodes(3)
        .build();
    try {
      cluster.waitActive();
      cluster.transitionToActive(0);
      NameNode nn1 = cluster.getNameNode(0);
      NameNode nn2 = cluster.getNameNode(1);
      BlockManager bm2 = nn2.getNamesystem().getBlockManager();
      FileSystem fs = HATestUtil.configureFailoverFs(cluster, conf);
      List<InvocationOnMock> ibrsToStandby = new ArrayList<>();
      List<DatanodeProtocolClientSideTranslatorPB> spies = new ArrayList<>();
      Phaser ibrPhaser = new Phaser(1);
      for (DataNode dn : cluster.getDataNodes()) {
        DatanodeProtocolClientSideTranslatorPB nnSpy =
            InternalDataNodeTestUtils.spyOnBposToNN(dn, nn2);
        doAnswer((inv) -> {
          // Capture the IBR for later replay and signal once per received block.
          for (StorageReceivedDeletedBlocks srdb :
              inv.getArgument(2, StorageReceivedDeletedBlocks[].class)) {
            for (ReceivedDeletedBlockInfo block : srdb.getBlocks()) {
              if (block.getStatus().equals(BlockStatus.RECEIVED_BLOCK)) {
                ibrsToStandby.add(inv);
                ibrPhaser.arriveAndDeregister();
              }
            }
          }
          return null;
        }).when(nnSpy).blockReceivedAndDeleted(
            any(DatanodeRegistration.class),
            anyString(),
            any(StorageReceivedDeletedBlocks[].class));
        spies.add(nnSpy);
      }
      LOG.info("==================================");
      // Force the DNs to delay report to the SNN
      // 9 parties = 3 DNs x 3 operations (1 write + 2 appends) below.
      ibrPhaser.bulkRegister(9);
      DFSTestUtil.writeFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
      HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
      // SNN has caught up to the latest edit log so we send the IBRs to SNN
      int phase = ibrPhaser.arrive();
      ibrPhaser.awaitAdvanceInterruptibly(phase, 60, TimeUnit.SECONDS);
      for (InvocationOnMock sendIBRs : ibrsToStandby) {
        try {
          sendIBRs.callRealMethod();
        } catch (Throwable t) {
          LOG.error("Exception thrown while calling sendIBRs: ", t);
        }
      }
      GenericTestUtils.waitFor(() -> bm2.getPendingDataNodeMessageCount() == 0,
          1000, 30000,
          "There should be 0 pending DN messages");
      ibrsToStandby.clear();
      ibrPhaser.arriveAndDeregister();
      ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, TEST_FILE_PATH);
      HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
      LOG.info("==================================");
      // Trigger an active switch to force SNN to mark blocks as corrupt if they
      // have a bad genstamp in the pendingDNMessages queue.
      cluster.transitionToStandby(0);
      cluster.transitionToActive(1);
      cluster.waitActive(1);
      assertEquals(0,
          nn2.getNamesystem().getBlockManager().numCorruptReplicas(block.getLocalBlock()),
          "There should not be any corrupt replicas");
    } finally {
      cluster.shutdown();
    }
  }
@Test
public void testIBRRaceCondition3() throws Exception {
cluster.shutdown();
Configuration conf = new Configuration();
HAUtil.setAllowStandbyReads(conf, true);
conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
cluster = new MiniDFSCluster.Builder(conf)
.nnTopology(MiniDFSNNTopology.simpleHATopology())
.numDataNodes(3)
.build();
try {
cluster.waitActive();
cluster.transitionToActive(0);
NameNode nn1 = cluster.getNameNode(0);
NameNode nn2 = cluster.getNameNode(1);
BlockManager bm2 = nn2.getNamesystem().getBlockManager();
FileSystem fs = HATestUtil.configureFailoverFs(cluster, conf);
LinkedHashMap<Long, List<InvocationOnMock>> ibrsToStandby =
new LinkedHashMap<>();
AtomicLong lowestGenStamp = new AtomicLong(Long.MAX_VALUE);
List<DatanodeProtocolClientSideTranslatorPB> spies = new ArrayList<>();
Phaser ibrPhaser = new Phaser(1);
for (DataNode dn : cluster.getDataNodes()) {
DatanodeProtocolClientSideTranslatorPB nnSpy =
InternalDataNodeTestUtils.spyOnBposToNN(dn, nn2);
doAnswer((inv) -> {
for (StorageReceivedDeletedBlocks srdb :
inv.getArgument(2, StorageReceivedDeletedBlocks[].class)) {
for (ReceivedDeletedBlockInfo block : srdb.getBlocks()) {
if (block.getStatus().equals(BlockStatus.RECEIVED_BLOCK)) {
long genStamp = block.getBlock().getGenerationStamp();
ibrsToStandby.putIfAbsent(genStamp, new ArrayList<>());
ibrsToStandby.get(genStamp).add(inv);
lowestGenStamp.getAndUpdate((prev) -> Math.min(prev, genStamp));
ibrPhaser.arriveAndDeregister();
}
}
}
return null;
}).when(nnSpy).blockReceivedAndDeleted(
any(DatanodeRegistration.class),
anyString(),
any(StorageReceivedDeletedBlocks[].class));
spies.add(nnSpy);
}
LOG.info("==================================");
// Force the DNs to delay report to the SNN
ibrPhaser.bulkRegister(9);
DFSTestUtil.writeFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
DFSTestUtil.appendFile(fs, TEST_FILE_PATH, TEST_FILE_DATA);
HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
// SNN has caught up to the latest edit log so we send the IBRs to SNN
int phase = ibrPhaser.arrive();
ibrPhaser.awaitAdvanceInterruptibly(phase, 60, TimeUnit.SECONDS);
ibrsToStandby.forEach((genStamp, ibrs) -> {
if (lowestGenStamp.get() != genStamp) {
ibrs.removeIf(inv -> {
try {
inv.callRealMethod();
} catch (Throwable t) {
LOG.error("Exception thrown while calling sendIBRs: ", t);
}
return true;
});
}
});
GenericTestUtils.waitFor(() -> bm2.getPendingDataNodeMessageCount() == 0,
1000, 30000,
"There should be 0 pending DN messages");
ibrPhaser.arriveAndDeregister();
ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, TEST_FILE_PATH);
HATestUtil.waitForStandbyToCatchUp(nn1, nn2);
// Send old ibrs to simulate actual stale or corrupt DNs
for (InvocationOnMock sendIBR : ibrsToStandby.get(lowestGenStamp.get())) {
try {
sendIBR.callRealMethod();
} catch (Throwable t) {
LOG.error("Exception thrown while calling sendIBRs: ", t);
}
}
GenericTestUtils.waitFor(() -> bm2.getPendingDataNodeMessageCount() == 3,
1000, 30000,
"There should be 0 pending DN messages");
LOG.info("==================================");
// Trigger an active switch to force SNN to mark blocks as corrupt if they
// have a bad genstamp in the pendingDNMessages queue.
cluster.transitionToStandby(0);
cluster.transitionToActive(1);
cluster.waitActive(1);
assertEquals(1,
nn2.getNamesystem().getBlockManager().numCorruptReplicas(block.getLocalBlock()),
"There should be 1 corrupt replica");
} finally {
cluster.shutdown();
}
}
}
| TestIncrementalBlockReports |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/junit/MockitoTestRule.java | {
"start": 436,
"end": 772
/**
 * {@link TestRule} counterpart of {@link MockitoRule}, exposing the same
 * configuration methods for rule-based JUnit setups.
 */
interface ____ extends TestRule {

    /**
     * Equivalent to {@link MockitoRule#silent()}.
     *
     * @return a rule configured as described by {@link MockitoRule#silent()}
     * @since 3.3.0
     */
    MockitoTestRule silent();

    /**
     * Equivalent to {@link MockitoRule#strictness(Strictness)}.
     *
     * @param strictness the stubbing strictness to apply
     * @return a rule configured with the given strictness
     * @since 3.3.0
     */
    MockitoTestRule strictness(Strictness strictness);
}
| MockitoTestRule |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsKeywordsEvaluator.java | {
"start": 1245,
"end": 5341
/**
 * {@link EvalOperator.ExpressionEvaluator} for the keyword ({@code BytesRef}) overload of
 * {@code Equals}. NOTE(review): this reads like machine-generated evaluator boilerplate —
 * prefer regenerating from the template over hand-editing; confirm against the generator.
 */
class ____ implements EvalOperator.ExpressionEvaluator {
    private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(EqualsKeywordsEvaluator.class);

    private final Source source;

    private final EvalOperator.ExpressionEvaluator lhs;

    private final EvalOperator.ExpressionEvaluator rhs;

    private final DriverContext driverContext;

    // Created lazily; only needed when a warning is actually registered.
    private Warnings warnings;

    public EqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
        EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
        this.source = source;
        this.lhs = lhs;
        this.rhs = rhs;
        this.driverContext = driverContext;
    }

    @Override
    public Block eval(Page page) {
        try (BytesRefBlock lhsBlock = (BytesRefBlock) lhs.eval(page)) {
            try (BytesRefBlock rhsBlock = (BytesRefBlock) rhs.eval(page)) {
                // Fast path: when both sides are plain vectors (no nulls, single-valued),
                // evaluate without per-position null/multi-value checks.
                BytesRefVector lhsVector = lhsBlock.asVector();
                if (lhsVector == null) {
                    return eval(page.getPositionCount(), lhsBlock, rhsBlock);
                }
                BytesRefVector rhsVector = rhsBlock.asVector();
                if (rhsVector == null) {
                    return eval(page.getPositionCount(), lhsBlock, rhsBlock);
                }
                return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock();
            }
        }
    }

    @Override
    public long baseRamBytesUsed() {
        long baseRamBytesUsed = BASE_RAM_BYTES_USED;
        baseRamBytesUsed += lhs.baseRamBytesUsed();
        baseRamBytesUsed += rhs.baseRamBytesUsed();
        return baseRamBytesUsed;
    }

    // Slow path: handles nulls and multi-values; multi-valued positions produce a
    // warning and a null result.
    public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBlock rhsBlock) {
        try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
            BytesRef lhsScratch = new BytesRef();
            BytesRef rhsScratch = new BytesRef();
            position: for (int p = 0; p < positionCount; p++) {
                switch (lhsBlock.getValueCount(p)) {
                    case 0:
                        result.appendNull();
                        continue position;
                    case 1:
                        break;
                    default:
                        warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
                        result.appendNull();
                        continue position;
                }
                switch (rhsBlock.getValueCount(p)) {
                    case 0:
                        result.appendNull();
                        continue position;
                    case 1:
                        break;
                    default:
                        warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
                        result.appendNull();
                        continue position;
                }
                BytesRef lhs = lhsBlock.getBytesRef(lhsBlock.getFirstValueIndex(p), lhsScratch);
                BytesRef rhs = rhsBlock.getBytesRef(rhsBlock.getFirstValueIndex(p), rhsScratch);
                result.appendBoolean(Equals.processKeywords(lhs, rhs));
            }
            return result.build();
        }
    }

    // Fast path over vectors: every position has exactly one non-null value.
    public BooleanVector eval(int positionCount, BytesRefVector lhsVector, BytesRefVector rhsVector) {
        try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
            BytesRef lhsScratch = new BytesRef();
            BytesRef rhsScratch = new BytesRef();
            position: for (int p = 0; p < positionCount; p++) {
                BytesRef lhs = lhsVector.getBytesRef(p, lhsScratch);
                BytesRef rhs = rhsVector.getBytesRef(p, rhsScratch);
                result.appendBoolean(p, Equals.processKeywords(lhs, rhs));
            }
            return result.build();
        }
    }

    @Override
    public String toString() {
        return "EqualsKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]";
    }

    @Override
    public void close() {
        Releasables.closeExpectNoException(lhs, rhs);
    }

    private Warnings warnings() {
        if (warnings == null) {
            this.warnings = Warnings.createWarnings(
                driverContext.warningsMode(),
                source.source().getLineNumber(),
                source.source().getColumnNumber(),
                source.text()
            );
        }
        return warnings;
    }
}
static | EqualsKeywordsEvaluator |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/UnalignedCheckpointRescaleITCase.java | {
"start": 35459,
"end": 35690
} | class ____ implements Partitioner<String> {
@Override
public int partition(String key, int numPartitions) {
return Integer.parseInt(key) % numPartitions;
}
}
private static | StringPartitioner |
java | grpc__grpc-java | okhttp/src/main/java/io/grpc/okhttp/OkHttpClientStream.java | {
"start": 4118,
"end": 5599
class ____ implements AbstractClientStream.Sink {
  @Override
  public void writeHeaders(Metadata metadata, byte[] payload) {
    try (TaskCloseable ignore = PerfMark.traceTask("OkHttpClientStream$Sink.writeHeaders")) {
      String defaultPath = "/" + method.getFullMethodName();
      if (payload != null) {
        // A non-null payload means the call goes out as an HTTP GET, with the
        // request body base64-encoded into the path's query string.
        useGet = true;
        defaultPath += "?" + BaseEncoding.base64().encode(payload);
      }
      // All stream state transitions are serialized on state.lock.
      synchronized (state.lock) {
        state.streamReady(metadata, defaultPath);
      }
    }
  }

  @Override
  public void writeFrame(
      WritableBuffer frame, boolean endOfStream, boolean flush, int numMessages) {
    try (TaskCloseable ignore = PerfMark.traceTask("OkHttpClientStream$Sink.writeFrame")) {
      Buffer buffer;
      if (frame == null) {
        // A null frame signals "no payload" (e.g. a bare end-of-stream marker).
        buffer = EMPTY_BUFFER;
      } else {
        buffer = ((OkHttpWritableBuffer) frame).buffer();
        int size = (int) buffer.size();
        if (size > 0) {
          // Account for the outbound bytes before handing them to the transport.
          onSendingBytes(size);
        }
      }
      synchronized (state.lock) {
        state.sendBuffer(buffer, endOfStream, flush);
        getTransportTracer().reportMessageSent(numMessages);
      }
    }
  }

  @Override
  public void cancel(Status reason) {
    try (TaskCloseable ignore = PerfMark.traceTask("OkHttpClientStream$Sink.cancel")) {
      synchronized (state.lock) {
        // NOTE(review): boolean/null argument meanings inferred from position —
        // confirm against TransportState#cancel before relying on them.
        state.cancel(reason, true, null);
      }
    }
  }
}
| Sink |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/intTest/projects/start-stop/src/main/java/org/test/SampleApplication.java | {
"start": 873,
"end": 1590
} | class ____ {
private static final Object lock = new Object();
public static void main(String[] args) throws Exception {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName name = new ObjectName(
"org.springframework.boot:type=Admin,name=SpringApplication");
SpringApplicationAdmin mbean = new SpringApplicationAdmin();
mbs.registerMBean(mbean, name);
// Flag the app as ready
mbean.ready = true;
int waitAttempts = 0;
while (!mbean.shutdownInvoked) {
if (waitAttempts > 30) {
throw new IllegalStateException(
"Shutdown should have been invoked by now");
}
synchronized (lock) {
lock.wait(250);
}
waitAttempts++;
}
}
public | SampleApplication |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ValidationUtils.java | {
"start": 1052,
"end": 1114
} | class ____ validation of operations. */
@Internal
public final | for |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/orca/OrcaMetricReportingServerInterceptorTest.java | {
"start": 13899,
"end": 14336
} | class ____
extends SimpleForwardingClientCallListener<RespT> {
TrailersCapturingClientCallListener(ClientCall.Listener<RespT> responseListener) {
super(responseListener);
}
@Override
public void onClose(Status status, Metadata trailers) {
trailersCapture.set(trailers);
super.onClose(status, trailers);
}
}
}
}
}
| TrailersCapturingClientCallListener |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/InternalTimeServiceManager.java | {
"start": 1759,
"end": 1913
} | interface ____<K> {
/** Signals whether the watermark should continue advancing. */
@Internal
@FunctionalInterface
| InternalTimeServiceManager |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java | {
"start": 85547,
"end": 85745
} | class ____ implements Converter<String, Alien> {
@Override
public Alien convert(String source) {
return new Alien(new StringBuilder(source).reverse().toString());
}
}
static | AlienConverter |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SedaComponentBuilderFactory.java | {
"start": 1878,
"end": 10359
} | interface ____ extends ComponentBuilder<SedaComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default SedaComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Sets the default number of concurrent threads processing exchanges.
*
* The option is a: <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param concurrentConsumers the value to set
* @return the dsl builder
*/
default SedaComponentBuilder concurrentConsumers(int concurrentConsumers) {
doSetProperty("concurrentConsumers", concurrentConsumers);
return this;
}
/**
* The timeout (in milliseconds) used when polling. When a timeout
* occurs, the consumer can check whether it is allowed to continue
* running. Setting a lower value allows the consumer to react more
* quickly upon shutdown.
*
* The option is a: <code>int</code> type.
*
* Default: 1000
* Group: consumer (advanced)
*
* @param defaultPollTimeout the value to set
* @return the dsl builder
*/
default SedaComponentBuilder defaultPollTimeout(int defaultPollTimeout) {
doSetProperty("defaultPollTimeout", defaultPollTimeout);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default SedaComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether a thread that sends messages to a full SEDA queue will block
* until the queue's capacity is no longer exhausted. By default, an
* exception will be thrown stating that the queue is full. By enabling
* this option, the calling thread will instead block and wait until the
* message can be accepted.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param defaultBlockWhenFull the value to set
* @return the dsl builder
*/
default SedaComponentBuilder defaultBlockWhenFull(boolean defaultBlockWhenFull) {
doSetProperty("defaultBlockWhenFull", defaultBlockWhenFull);
return this;
}
/**
* Whether a thread that sends messages to a full SEDA queue will be
* discarded. By default, an exception will be thrown stating that the
* queue is full. By enabling this option, the calling thread will give
* up sending and continue, meaning that the message was not sent to the
* SEDA queue.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param defaultDiscardWhenFull the value to set
* @return the dsl builder
*/
default SedaComponentBuilder defaultDiscardWhenFull(boolean defaultDiscardWhenFull) {
doSetProperty("defaultDiscardWhenFull", defaultDiscardWhenFull);
return this;
}
/**
* Whether a thread that sends messages to a full SEDA queue will block
* until the queue's capacity is no longer exhausted. By default, an
* exception will be thrown stating that the queue is full. By enabling
* this option, where a configured timeout can be added to the block
* case. Using the offer(timeout) method of the underlining java queue.
*
* The option is a: <code>long</code> type.
*
* Group: producer (advanced)
*
* @param defaultOfferTimeout the value to set
* @return the dsl builder
*/
default SedaComponentBuilder defaultOfferTimeout(long defaultOfferTimeout) {
doSetProperty("defaultOfferTimeout", defaultOfferTimeout);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default SedaComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Sets the default queue factory.
*
* The option is a:
* <code>org.apache.camel.component.seda.BlockingQueueFactory&lt;org.apache.camel.Exchange&gt;</code> type.
*
* Group: advanced
*
* @param defaultQueueFactory the value to set
* @return the dsl builder
*/
default SedaComponentBuilder defaultQueueFactory(org.apache.camel.component.seda.BlockingQueueFactory<org.apache.camel.Exchange> defaultQueueFactory) {
doSetProperty("defaultQueueFactory", defaultQueueFactory);
return this;
}
/**
* Sets the default maximum capacity of the SEDA queue (i.e., the number
* of messages it can hold).
*
* The option is a: <code>int</code> type.
*
* Default: 1000
* Group: advanced
*
* @param queueSize the value to set
* @return the dsl builder
*/
default SedaComponentBuilder queueSize(int queueSize) {
doSetProperty("queueSize", queueSize);
return this;
}
}
| SedaComponentBuilder |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/engine/support/hierarchical/NodeTreeWalkerIntegrationTests.java | {
"start": 11885,
"end": 12027
} | class ____ {
@Test
@ResourceLock("b")
void test() {
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
static | TestCaseWithResourceLock |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/config/ServiceConfigBase.java | {
"start": 2005,
"end": 2156
} | class ____<T> extends AbstractServiceConfig {
private static final long serialVersionUID = 3033787999037024738L;
/**
* The | ServiceConfigBase |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/search/arguments/VectorFieldArgs.java | {
"start": 1063,
"end": 1182
} | class ____<K> extends FieldArgs<K> {
/**
* Vector similarity index algorithms.
*/
public | VectorFieldArgs |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/scripting/LanguageDriverRegistryTest.java | {
"start": 3682,
"end": 4215
} | class ____ implements LanguageDriver {
@Override
public ParameterHandler createParameterHandler(MappedStatement mappedStatement, Object parameterObject,
BoundSql boundSql) {
return null;
}
@Override
public SqlSource createSqlSource(Configuration configuration, XNode script, Class<?> parameterType) {
return null;
}
@Override
public SqlSource createSqlSource(Configuration configuration, String script, Class<?> parameterType) {
return null;
}
}
}
| PrivateLanguageDriver |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hbm/uk/UniqueDelegateTest.java | {
"start": 3435,
"end": 3711
} | class ____ extends H2Dialect {
private MyUniqueDelegate myUniqueDelegate;
public MyDialect() {
this.myUniqueDelegate = new MyUniqueDelegate( this );
}
@Override
public UniqueDelegate getUniqueDelegate() {
return myUniqueDelegate;
}
}
public static | MyDialect |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/resulttype/AppleFamilyDto.java | {
"start": 244,
"end": 439
} | class ____ {
private AppleDto apple;
public AppleDto getApple() {
return apple;
}
public void setApple(AppleDto apple) {
this.apple = apple;
}
}
| AppleFamilyDto |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilder.java | {
"start": 20655,
"end": 21335
} | enum ____ implements Writeable {
CHARS,
WORD,
SENTENCE;
public static BoundaryScannerType readFromStream(StreamInput in) throws IOException {
return in.readEnum(BoundaryScannerType.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(this);
}
public static BoundaryScannerType fromString(String boundaryScannerType) {
return valueOf(boundaryScannerType.toUpperCase(Locale.ROOT));
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
}
| BoundaryScannerType |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/BeanRegistry.java | {
"start": 2171,
"end": 3145
} | class ____ the bean
* @return the generated bean name
* @see #registerBean(Class)
*/
<T> String registerBean(Class<T> beanClass);
/**
* Register a bean from the given generics-containing type, which will be
* instantiated using the related
* {@link BeanUtils#getResolvableConstructor resolvable constructor} if any.
* @param beanType the generics-containing type of the bean
* @return the generated bean name
*/
<T> String registerBean(ParameterizedTypeReference<T> beanType);
/**
* Register a bean from the given class, customizing it with the customizer
* callback. The bean will be instantiated using the supplier that can be configured
* in the customizer callback, or will be tentatively instantiated with its
* {@link BeanUtils#getResolvableConstructor resolvable constructor} otherwise.
* <p>For registering a bean with a generic type, consider
* {@link #registerBean(ParameterizedTypeReference, Consumer)}.
* @param beanClass the | of |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.