language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-http12/src/test/java/org/apache/dubbo/remoting/http12/message/codec/User.java
|
{
"start": 962,
"end": 1668
}
|
class ____ implements Serializable {
private String username;
private String location;
public User() {}
public User(String username, String location) {
this.username = username;
this.location = location;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public String toString() {
return "User{" + "username='" + username + '\'' + ", location='" + location + '\'' + '}';
}
}
|
User
|
java
|
qos-ch__slf4j
|
slf4j-api/src/main/java/org/slf4j/spi/SLF4JServiceProvider.java
|
{
"start": 511,
"end": 753
}
|
class ____ bind to.
*
* @return instance of {@link ILoggerFactory}
*/
public ILoggerFactory getLoggerFactory();
/**
* Return the instance of {@link IMarkerFactory} that
* {@link org.slf4j.MarkerFactory}
|
should
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1155/Entity.java
|
{
"start": 359,
"end": 527
}
|
class ____ {
//CHECKSTYLE:OFF
public long clientId;
//CHECKSTYLE:ON
}
//CHECKSTYLE:OFF
public Client client;
//CHECKSTYLE:ON
}
|
Dto
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/AbstractMethodMessageHandler.java
|
{
"start": 9682,
"end": 23049
}
|
class ____ bean with name '" + beanName + "'", ex);
}
}
if (beanType != null && isHandler(beanType)) {
detectHandlerMethods(beanName);
}
}
}
}
/**
* Return the list of argument resolvers to use. Invoked only if the resolvers
* have not already been set via {@link #setArgumentResolvers}.
* <p>Subclasses should also take into account custom argument types configured via
* {@link #setCustomArgumentResolvers}.
*/
protected abstract List<? extends HandlerMethodArgumentResolver> initArgumentResolvers();
/**
* Return the list of return value handlers to use. Invoked only if the return
* value handlers have not already been set via {@link #setReturnValueHandlers}.
* <p>Subclasses should also take into account custom return value types configured
* via {@link #setCustomReturnValueHandlers}.
*/
protected abstract List<? extends HandlerMethodReturnValueHandler> initReturnValueHandlers();
/**
* Whether the given bean type should be introspected for messaging handling methods.
*/
protected abstract boolean isHandler(Class<?> beanType);
/**
* Detect if the given handler has any methods that can handle messages and if
* so register it with the extracted mapping information.
* @param handler the handler to check, either an instance of a Spring bean name
*/
protected final void detectHandlerMethods(final Object handler) {
Class<?> handlerType;
if (handler instanceof String beanName) {
ApplicationContext context = getApplicationContext();
Assert.state(context != null, "ApplicationContext is required for resolving handler bean names");
handlerType = context.getType(beanName);
}
else {
handlerType = handler.getClass();
}
if (handlerType != null) {
final Class<?> userType = ClassUtils.getUserClass(handlerType);
Map<Method, T> methods = MethodIntrospector.selectMethods(userType,
(MethodIntrospector.MetadataLookup<T>) method -> getMappingForMethod(method, userType));
if (logger.isDebugEnabled()) {
logger.debug(formatMappings(userType, methods));
}
methods.forEach((key, value) -> registerHandlerMethod(handler, key, value));
}
}
private String formatMappings(Class<?> userType, Map<Method, T> methods) {
String packageName = ClassUtils.getPackageName(userType);
String formattedType = (StringUtils.hasText(packageName) ?
Arrays.stream(packageName.split("\\."))
.map(packageSegment -> packageSegment.substring(0, 1))
.collect(Collectors.joining(".", "", "." + userType.getSimpleName())) :
userType.getSimpleName());
Function<Method, String> methodFormatter = method -> Arrays.stream(method.getParameterTypes())
.map(Class::getSimpleName)
.collect(Collectors.joining(",", "(", ")"));
return methods.entrySet().stream()
.map(e -> {
Method method = e.getKey();
return e.getValue() + ": " + method.getName() + methodFormatter.apply(method);
})
.collect(Collectors.joining("\n\t", "\n\t" + formattedType + ":" + "\n\t", ""));
}
/**
* Provide the mapping for a handler method.
* @param method the method to provide a mapping for
* @param handlerType the handler type, possibly a subtype of the method's declaring class
* @return the mapping, or {@code null} if the method is not mapped
*/
protected abstract @Nullable T getMappingForMethod(Method method, Class<?> handlerType);
/**
* Register a handler method and its unique mapping.
* @param handler the bean name of the handler or the handler instance
* @param method the method to register
* @param mapping the mapping conditions associated with the handler method
* @throws IllegalStateException if another method was already registered
* under the same mapping
*/
protected void registerHandlerMethod(Object handler, Method method, T mapping) {
Assert.notNull(mapping, "Mapping must not be null");
HandlerMethod newHandlerMethod = createHandlerMethod(handler, method);
HandlerMethod oldHandlerMethod = this.handlerMethods.get(mapping);
if (oldHandlerMethod != null && !oldHandlerMethod.equals(newHandlerMethod)) {
throw new IllegalStateException("Ambiguous mapping found. Cannot map '" + newHandlerMethod.getBean() +
"' bean method \n" + newHandlerMethod + "\nto " + mapping + ": There is already '" +
oldHandlerMethod.getBean() + "' bean method\n" + oldHandlerMethod + " mapped.");
}
this.handlerMethods.put(mapping, newHandlerMethod);
for (String pattern : getDirectLookupDestinations(mapping)) {
this.destinationLookup.add(pattern, mapping);
}
}
/**
* Create a HandlerMethod instance from an Object handler that is either a handler
* instance or a String-based bean name.
*/
protected HandlerMethod createHandlerMethod(Object handler, Method method) {
HandlerMethod handlerMethod;
if (handler instanceof String beanName) {
ApplicationContext context = getApplicationContext();
Assert.state(context != null, "ApplicationContext is required for resolving handler bean names");
handlerMethod = new HandlerMethod(beanName, context.getAutowireCapableBeanFactory(), method);
}
else {
handlerMethod = new HandlerMethod(handler, method);
}
return handlerMethod;
}
/**
* Return destinations contained in the mapping that are not patterns and are
* therefore suitable for direct lookups.
*/
protected abstract Set<String> getDirectLookupDestinations(T mapping);
/**
* Return a logger to set on {@link HandlerMethodReturnValueHandlerComposite}.
* @since 5.1
*/
protected @Nullable Log getReturnValueHandlerLogger() {
return null;
}
/**
* Return a logger to set on {@link InvocableHandlerMethod}.
* @since 5.1
*/
protected @Nullable Log getHandlerMethodLogger() {
return null;
}
/**
* Subclasses can invoke this method to populate the MessagingAdviceBean cache
* (for example, to support "global" {@code @MessageExceptionHandler}).
* @since 4.2
*/
protected void registerExceptionHandlerAdvice(
MessagingAdviceBean bean, AbstractExceptionHandlerMethodResolver resolver) {
this.exceptionHandlerAdviceCache.put(bean, resolver);
}
/**
* Return a map with all handler methods and their mappings.
*/
public Map<T, HandlerMethod> getHandlerMethods() {
return Collections.unmodifiableMap(this.handlerMethods);
}
@Override
public void handleMessage(Message<?> message) throws MessagingException {
String destination = getDestination(message);
if (destination == null) {
return;
}
String lookupDestination = getLookupDestination(destination);
if (lookupDestination == null) {
return;
}
MessageHeaderAccessor headerAccessor = MessageHeaderAccessor.getMutableAccessor(message);
headerAccessor.setHeader(DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER, lookupDestination);
headerAccessor.setLeaveMutable(true);
message = MessageBuilder.createMessage(message.getPayload(), headerAccessor.getMessageHeaders());
if (logger.isDebugEnabled()) {
logger.debug("Searching methods to handle " +
headerAccessor.getShortLogMessage(message.getPayload()) +
", lookupDestination='" + lookupDestination + "'");
}
handleMessageInternal(message, lookupDestination);
headerAccessor.setImmutable();
}
protected abstract @Nullable String getDestination(Message<?> message);
/**
* Check whether the given destination (of an incoming message) matches to
* one of the configured destination prefixes and if so return the remaining
* portion of the destination after the matched prefix.
* <p>If there are no matching prefixes, return {@code null}.
* <p>If there are no destination prefixes, return the destination as is.
*/
@SuppressWarnings("ForLoopReplaceableByForEach")
protected @Nullable String getLookupDestination(@Nullable String destination) {
if (destination == null) {
return null;
}
if (CollectionUtils.isEmpty(this.destinationPrefixes)) {
return destination;
}
for (String prefix : this.destinationPrefixes) {
if (destination.startsWith(prefix)) {
return destination.substring(prefix.length());
}
}
return null;
}
protected void handleMessageInternal(Message<?> message, String lookupDestination) {
List<Match> matches = new ArrayList<>();
List<T> mappingsByUrl = this.destinationLookup.get(lookupDestination);
if (mappingsByUrl != null) {
addMatchesToCollection(mappingsByUrl, message, matches);
}
if (matches.isEmpty()) {
// No direct hits, go through all mappings
Set<T> allMappings = this.handlerMethods.keySet();
addMatchesToCollection(allMappings, message, matches);
}
if (matches.isEmpty()) {
handleNoMatch(this.handlerMethods.keySet(), lookupDestination, message);
return;
}
Comparator<Match> comparator = new MatchComparator(getMappingComparator(message));
matches.sort(comparator);
if (logger.isTraceEnabled()) {
logger.trace("Found " + matches.size() + " handler methods: " + matches);
}
Match bestMatch = matches.get(0);
if (matches.size() > 1) {
Match secondBestMatch = matches.get(1);
if (comparator.compare(bestMatch, secondBestMatch) == 0) {
Method m1 = bestMatch.handlerMethod.getMethod();
Method m2 = secondBestMatch.handlerMethod.getMethod();
throw new IllegalStateException("Ambiguous handler methods mapped for destination '" +
lookupDestination + "': {" + m1 + ", " + m2 + "}");
}
}
handleMatch(bestMatch.mapping, bestMatch.handlerMethod, lookupDestination, message);
}
private void addMatchesToCollection(Collection<T> mappingsToCheck, Message<?> message, List<Match> matches) {
for (T mapping : mappingsToCheck) {
T match = getMatchingMapping(mapping, message);
if (match != null) {
HandlerMethod handlerMethod = this.handlerMethods.get(mapping);
Assert.state(handlerMethod != null, "HandlerMethod must not be null");
matches.add(new Match(match, handlerMethod));
}
}
}
/**
* Check if a mapping matches the current message and return a possibly
* new mapping with conditions relevant to the current request.
* @param mapping the mapping to get a match for
* @param message the message being handled
* @return the match or {@code null} if there is no match
*/
protected abstract @Nullable T getMatchingMapping(T mapping, Message<?> message);
protected void handleNoMatch(Set<T> ts, String lookupDestination, Message<?> message) {
logger.debug("No matching message handler methods.");
}
/**
* Return a comparator for sorting matching mappings.
* The returned comparator should sort 'better' matches higher.
* @param message the current Message
* @return the comparator, never {@code null}
*/
protected abstract Comparator<T> getMappingComparator(Message<?> message);
protected void handleMatch(T mapping, HandlerMethod handlerMethod, String lookupDestination, Message<?> message) {
if (logger.isDebugEnabled()) {
logger.debug("Invoking " + handlerMethod.getShortLogMessage());
}
handlerMethod = handlerMethod.createWithResolvedBean();
InvocableHandlerMethod invocable = new InvocableHandlerMethod(handlerMethod);
if (this.handlerMethodLogger != null) {
invocable.setLogger(this.handlerMethodLogger);
}
invocable.setMessageMethodArgumentResolvers(this.argumentResolvers);
try {
Object returnValue = invocable.invoke(message);
MethodParameter returnType = handlerMethod.getReturnType();
if (void.class == returnType.getParameterType()) {
return;
}
if (returnValue != null && this.returnValueHandlers.isAsyncReturnValue(returnValue, returnType)) {
CompletableFuture<?> future = this.returnValueHandlers.toCompletableFuture(returnValue, returnType);
if (future != null) {
future.whenComplete(new ReturnValueCallback(invocable, message));
}
}
else {
this.returnValueHandlers.handleReturnValue(returnValue, returnType, message);
}
}
catch (Exception ex) {
processHandlerMethodException(handlerMethod, ex, message);
}
catch (Throwable ex) {
Exception handlingException =
new MessageHandlingException(message, "Unexpected handler method invocation error", ex);
processHandlerMethodException(handlerMethod, handlingException, message);
}
}
protected void processHandlerMethodException(HandlerMethod handlerMethod, Exception exception, Message<?> message) {
InvocableHandlerMethod invocable = getExceptionHandlerMethod(handlerMethod, exception);
if (invocable == null) {
logger.error("Unhandled exception from message handler method", exception);
return;
}
invocable.setMessageMethodArgumentResolvers(this.argumentResolvers);
if (logger.isDebugEnabled()) {
logger.debug("Invoking " + invocable.getShortLogMessage());
}
try {
Throwable cause = exception.getCause();
Object returnValue = (cause != null ?
invocable.invoke(message, exception, cause, handlerMethod) :
invocable.invoke(message, exception, handlerMethod));
MethodParameter returnType = invocable.getReturnType();
if (void.class == returnType.getParameterType()) {
return;
}
this.returnValueHandlers.handleReturnValue(returnValue, returnType, message);
}
catch (Throwable ex2) {
logger.error("Error while processing handler method exception", ex2);
}
}
/**
* Find an {@code @MessageExceptionHandler} method for the given exception.
* The default implementation searches methods in the
|
for
|
java
|
alibaba__nacos
|
consistency/src/main/java/com/alibaba/nacos/consistency/Config.java
|
{
"start": 1419,
"end": 2879
}
|
interface ____<L extends RequestProcessor> extends Serializable {
/**
* Set the cluster node information to initialize,like [ip:port, ip:port, ip:port].
*
* @param self local node address information, ip:port
* @param members {@link Set}
*/
void setMembers(String self, Set<String> members);
/**
* members join.
*
* @param members {@link Set}
*/
void addMembers(Set<String> members);
/**
* members leave.
*
* @param members {@link Set}
*/
void removeMembers(Set<String> members);
/**
* get local node address info.
*
* @return address
*/
String getSelfMember();
/**
* get the cluster node information.
*
* @return members info, like [ip:port, ip:port, ip:port]
*/
Set<String> getMembers();
/**
* Add configuration content.
*
* @param key config key
* @param value config value
*/
void setVal(String key, String value);
/**
* get configuration content by key.
*
* @param key config key
* @return config value
*/
String getVal(String key);
/**
* get configuration content by key, if not found, use default-val.
*
* @param key config key
* @param defaultVal default value
* @return config value
*/
String getValOfDefault(String key, String defaultVal);
}
|
Config
|
java
|
spring-projects__spring-boot
|
module/spring-boot-quartz/src/test/java/org/springframework/boot/quartz/actuate/endpoint/QuartzEndpointTests.java
|
{
"start": 3481,
"end": 41183
}
|
class ____ {
private static final JobDetail jobOne = JobBuilder.newJob(Job.class).withIdentity("jobOne").build();
private static final JobDetail jobTwo = JobBuilder.newJob(DelegatingJob.class).withIdentity("jobTwo").build();
private static final JobDetail jobThree = JobBuilder.newJob(Job.class).withIdentity("jobThree", "samples").build();
private static final Trigger triggerOne = TriggerBuilder.newTrigger()
.forJob(jobOne)
.withIdentity("triggerOne")
.build();
private static final Trigger triggerTwo = TriggerBuilder.newTrigger()
.forJob(jobOne)
.withIdentity("triggerTwo")
.build();
private static final Trigger triggerThree = TriggerBuilder.newTrigger()
.forJob(jobThree)
.withIdentity("triggerThree", "samples")
.build();
private final Scheduler scheduler;
private final QuartzEndpoint endpoint;
QuartzEndpointTests() {
this.scheduler = mock(Scheduler.class);
this.endpoint = new QuartzEndpoint(this.scheduler, Collections.emptyList());
}
@Test
void quartzReport() throws SchedulerException {
given(this.scheduler.getJobGroupNames()).willReturn(Arrays.asList("jobSamples", "DEFAULT"));
given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.singletonList("triggerSamples"));
QuartzDescriptor quartzReport = this.endpoint.quartzReport();
assertThat(quartzReport.getJobs().getGroups()).containsOnly("jobSamples", "DEFAULT");
assertThat(quartzReport.getTriggers().getGroups()).containsOnly("triggerSamples");
then(this.scheduler).should().getJobGroupNames();
then(this.scheduler).should().getTriggerGroupNames();
then(this.scheduler).shouldHaveNoMoreInteractions();
}
@Test
void quartzReportWithNoJob() throws SchedulerException {
given(this.scheduler.getJobGroupNames()).willReturn(Collections.emptyList());
given(this.scheduler.getTriggerGroupNames()).willReturn(Arrays.asList("triggerSamples", "DEFAULT"));
QuartzDescriptor quartzReport = this.endpoint.quartzReport();
assertThat(quartzReport.getJobs().getGroups()).isEmpty();
assertThat(quartzReport.getTriggers().getGroups()).containsOnly("triggerSamples", "DEFAULT");
}
@Test
void quartzReportWithNoTrigger() throws SchedulerException {
given(this.scheduler.getJobGroupNames()).willReturn(Collections.singletonList("jobSamples"));
given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.emptyList());
QuartzDescriptor quartzReport = this.endpoint.quartzReport();
assertThat(quartzReport.getJobs().getGroups()).containsOnly("jobSamples");
assertThat(quartzReport.getTriggers().getGroups()).isEmpty();
}
@Test
void quartzJobGroupsWithExistingGroups() throws SchedulerException {
mockJobs(jobOne, jobTwo, jobThree);
Map<String, Object> jobGroups = this.endpoint.quartzJobGroups().getGroups();
assertThat(jobGroups).containsOnlyKeys("DEFAULT", "samples");
assertThat(jobGroups).extractingByKey("DEFAULT", nestedMap())
.containsOnly(entry("jobs", Arrays.asList("jobOne", "jobTwo")));
assertThat(jobGroups).extractingByKey("samples", nestedMap())
.containsOnly(entry("jobs", Collections.singletonList("jobThree")));
}
@Test
void quartzJobGroupsWithNoGroup() throws SchedulerException {
given(this.scheduler.getJobGroupNames()).willReturn(Collections.emptyList());
Map<String, Object> jobGroups = this.endpoint.quartzJobGroups().getGroups();
assertThat(jobGroups).isEmpty();
}
@Test
void quartzTriggerGroupsWithExistingGroups() throws SchedulerException {
mockTriggers(triggerOne, triggerTwo, triggerThree);
given(this.scheduler.getPausedTriggerGroups()).willReturn(Collections.singleton("samples"));
Map<String, Object> triggerGroups = this.endpoint.quartzTriggerGroups().getGroups();
assertThat(triggerGroups).containsOnlyKeys("DEFAULT", "samples");
assertThat(triggerGroups).extractingByKey("DEFAULT", nestedMap())
.containsOnly(entry("paused", false), entry("triggers", Arrays.asList("triggerOne", "triggerTwo")));
assertThat(triggerGroups).extractingByKey("samples", nestedMap())
.containsOnly(entry("paused", true), entry("triggers", Collections.singletonList("triggerThree")));
}
@Test
void quartzTriggerGroupsWithNoGroup() throws SchedulerException {
given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.emptyList());
Map<String, Object> triggerGroups = this.endpoint.quartzTriggerGroups().getGroups();
assertThat(triggerGroups).isEmpty();
}
@Test
void quartzJobGroupSummaryWithInvalidGroup() throws SchedulerException {
given(this.scheduler.getJobGroupNames()).willReturn(Collections.singletonList("DEFAULT"));
QuartzJobGroupSummaryDescriptor summary = this.endpoint.quartzJobGroupSummary("unknown");
assertThat(summary).isNull();
}
@Test
void quartzJobGroupSummaryWithEmptyGroup() throws SchedulerException {
given(this.scheduler.getJobGroupNames()).willReturn(Collections.singletonList("samples"));
given(this.scheduler.getJobKeys(GroupMatcher.jobGroupEquals("samples"))).willReturn(Collections.emptySet());
QuartzJobGroupSummaryDescriptor summary = this.endpoint.quartzJobGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.getJobs()).isEmpty();
}
@Test
void quartzJobGroupSummaryWithJobs() throws SchedulerException {
mockJobs(jobOne, jobTwo);
QuartzJobGroupSummaryDescriptor summary = this.endpoint.quartzJobGroupSummary("DEFAULT");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("DEFAULT");
Map<String, QuartzJobSummaryDescriptor> jobSummaries = summary.getJobs();
assertThat(jobSummaries).containsOnlyKeys("jobOne", "jobTwo");
QuartzJobSummaryDescriptor jobOne = jobSummaries.get("jobOne");
assertThat(jobOne).isNotNull();
assertThat(jobOne.getClassName()).isEqualTo(Job.class.getName());
QuartzJobSummaryDescriptor jobTwo = jobSummaries.get("jobTwo");
assertThat(jobTwo).isNotNull();
assertThat(jobTwo.getClassName()).isEqualTo(DelegatingJob.class.getName());
}
@Test
void quartzTriggerGroupSummaryWithInvalidGroup() throws SchedulerException {
given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.singletonList("DEFAULT"));
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("unknown");
assertThat(summary).isNull();
}
@Test
void quartzTriggerGroupSummaryWithEmptyGroup() throws SchedulerException {
given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.singletonList("samples"));
given(this.scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals("samples")))
.willReturn(Collections.emptySet());
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.isPaused()).isFalse();
assertThat(summary.getTriggers().getCron()).isEmpty();
assertThat(summary.getTriggers().getSimple()).isEmpty();
assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
assertThat(summary.getTriggers().getCustom()).isEmpty();
}
@Test
void quartzTriggerGroupSummaryWithCronTrigger() throws SchedulerException {
CronTrigger cronTrigger = TriggerBuilder.newTrigger()
.withIdentity("3am-every-day", "samples")
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.build();
mockTriggers(cronTrigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.isPaused()).isFalse();
assertThat(summary.getTriggers().getCron()).containsOnlyKeys("3am-every-day");
assertThat(summary.getTriggers().getSimple()).isEmpty();
assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
assertThat(summary.getTriggers().getCustom()).isEmpty();
}
@Test
void quartzTriggerGroupSummaryWithCronTriggerDetails() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
CronTrigger cronTrigger = TriggerBuilder.newTrigger()
.withIdentity("3am-every-day", "samples")
.withPriority(3)
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0).inTimeZone(timeZone))
.build();
((OperableTrigger) cronTrigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) cronTrigger).setNextFireTime(nextFireTime);
mockTriggers(cronTrigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
Map<String, Object> triggers = summary.getTriggers().getCron();
assertThat(triggers).containsOnlyKeys("3am-every-day");
assertThat(triggers).extractingByKey("3am-every-day", nestedMap())
.containsOnly(entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
entry("priority", 3), entry("expression", "0 0 3 ? * *"), entry("timeZone", timeZone));
}
@Test
void quartzTriggerGroupSummaryWithSimpleTrigger() throws SchedulerException {
SimpleTrigger simpleTrigger = TriggerBuilder.newTrigger()
.withIdentity("every-hour", "samples")
.withSchedule(SimpleScheduleBuilder.repeatHourlyForever(1))
.build();
mockTriggers(simpleTrigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.isPaused()).isFalse();
assertThat(summary.getTriggers().getCron()).isEmpty();
assertThat(summary.getTriggers().getSimple()).containsOnlyKeys("every-hour");
assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
assertThat(summary.getTriggers().getCustom()).isEmpty();
}
@Test
void quartzTriggerGroupSummaryWithSimpleTriggerDetails() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
SimpleTrigger simpleTrigger = TriggerBuilder.newTrigger()
.withIdentity("every-hour", "samples")
.withPriority(7)
.withSchedule(SimpleScheduleBuilder.repeatHourlyForever(1))
.build();
((OperableTrigger) simpleTrigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) simpleTrigger).setNextFireTime(nextFireTime);
mockTriggers(simpleTrigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
Map<String, Object> triggers = summary.getTriggers().getSimple();
assertThat(triggers).containsOnlyKeys("every-hour");
assertThat(triggers).extractingByKey("every-hour", nestedMap())
.containsOnly(entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
entry("priority", 7), entry("interval", 3600000L));
}
@Test
void quartzTriggerGroupSummaryWithDailyIntervalTrigger() throws SchedulerException {
DailyTimeIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("every-hour-9am", "samples")
.withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule()
.startingDailyAt(TimeOfDay.hourAndMinuteOfDay(9, 0))
.withInterval(1, IntervalUnit.HOUR))
.build();
mockTriggers(trigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.isPaused()).isFalse();
assertThat(summary.getTriggers().getCron()).isEmpty();
assertThat(summary.getTriggers().getSimple()).isEmpty();
assertThat(summary.getTriggers().getDailyTimeInterval()).containsOnlyKeys("every-hour-9am");
assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
assertThat(summary.getTriggers().getCustom()).isEmpty();
}
@Test
void quartzTriggerGroupSummaryWithDailyIntervalTriggerDetails() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
DailyTimeIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("every-hour-tue-thu", "samples")
.withPriority(4)
.withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule()
.onDaysOfTheWeek(Calendar.TUESDAY, Calendar.THURSDAY)
.startingDailyAt(TimeOfDay.hourAndMinuteOfDay(9, 0))
.endingDailyAt(TimeOfDay.hourAndMinuteOfDay(18, 0))
.withInterval(1, IntervalUnit.HOUR))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockTriggers(trigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
Map<String, Object> triggers = summary.getTriggers().getDailyTimeInterval();
assertThat(triggers).containsOnlyKeys("every-hour-tue-thu");
assertThat(triggers).extractingByKey("every-hour-tue-thu", nestedMap())
.containsOnly(entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
entry("priority", 4), entry("interval", 3600000L), entry("startTimeOfDay", LocalTime.of(9, 0)),
entry("endTimeOfDay", LocalTime.of(18, 0)),
entry("daysOfWeek", new LinkedHashSet<>(Arrays.asList(3, 5))));
}
@Test
void quartzTriggerGroupSummaryWithCalendarIntervalTrigger() throws SchedulerException {
CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("once-a-week", "samples")
.withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule().withIntervalInWeeks(1))
.build();
mockTriggers(trigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.isPaused()).isFalse();
assertThat(summary.getTriggers().getCron()).isEmpty();
assertThat(summary.getTriggers().getSimple()).isEmpty();
assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
assertThat(summary.getTriggers().getCalendarInterval()).containsOnlyKeys("once-a-week");
assertThat(summary.getTriggers().getCustom()).isEmpty();
}
@Test
void quartzTriggerGroupSummaryWithCalendarIntervalTriggerDetails() throws SchedulerException {
TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("once-a-week", "samples")
.withPriority(8)
.withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule()
.withIntervalInWeeks(1)
.inTimeZone(timeZone))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockTriggers(trigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
Map<String, Object> triggers = summary.getTriggers().getCalendarInterval();
assertThat(triggers).containsOnlyKeys("once-a-week");
assertThat(triggers).extractingByKey("once-a-week", nestedMap())
.containsOnly(entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
entry("priority", 8), entry("interval", 604800000L), entry("timeZone", timeZone));
}
@Test
// A trigger that is none of the four well-known Quartz types must be classified
// under the "custom" bucket of the group summary, leaving the other buckets empty.
void quartzTriggerGroupSummaryWithCustomTrigger() throws SchedulerException {
Trigger trigger = mock(Trigger.class);
given(trigger.getKey()).willReturn(TriggerKey.triggerKey("custom", "samples"));
mockTriggers(trigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
assertThat(summary.getGroup()).isEqualTo("samples");
assertThat(summary.isPaused()).isFalse();
assertThat(summary.getTriggers().getCron()).isEmpty();
assertThat(summary.getTriggers().getSimple()).isEmpty();
assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
assertThat(summary.getTriggers().getCustom()).containsOnlyKeys("custom");
}
@Test
// For a custom trigger the summary falls back to generic details: fire times,
// priority, and the trigger's own toString() representation.
void quartzTriggerGroupSummaryWithCustomTriggerDetails() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
Trigger trigger = mock(Trigger.class);
given(trigger.getKey()).willReturn(TriggerKey.triggerKey("custom", "samples"));
given(trigger.getPreviousFireTime()).willReturn(previousFireTime);
given(trigger.getNextFireTime()).willReturn(nextFireTime);
given(trigger.getPriority()).willReturn(9);
mockTriggers(trigger);
QuartzTriggerGroupSummaryDescriptor summary = this.endpoint.quartzTriggerGroupSummary("samples");
assertThat(summary).isNotNull();
Map<String, Object> triggers = summary.getTriggers().getCustom();
assertThat(triggers).containsOnlyKeys("custom");
assertThat(triggers).extractingByKey("custom", nestedMap())
.containsOnly(entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
entry("priority", 9), entry("trigger", trigger.toString()));
}
@Test
void quartzTriggerWithCronTrigger() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
CronTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("3am-every-day", "samples")
.withPriority(3)
.withDescription("Sample description")
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0).inTimeZone(timeZone))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("3am-every-day", "samples")))
.willReturn(TriggerState.NORMAL);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "3am-every-day", true);
assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "3am-every-day"),
entry("description", "Sample description"), entry("type", "cron"), entry("state", TriggerState.NORMAL),
entry("priority", 3));
assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
entry("nextFireTime", nextFireTime));
assertThat(triggerDetails).doesNotContainKeys("simple", "dailyTimeInterval", "calendarInterval", "custom");
assertThat(triggerDetails).extractingByKey("cron", nestedMap())
.containsOnly(entry("expression", "0 0 3 ? * *"), entry("timeZone", timeZone));
}
@Test
void quartzTriggerWithSimpleTrigger() throws SchedulerException {
Date startTime = Date.from(Instant.parse("2020-01-01T09:00:00Z"));
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
Date endTime = Date.from(Instant.parse("2020-01-31T09:00:00Z"));
SimpleTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("every-hour", "samples")
.withPriority(20)
.withDescription("Every hour")
.startAt(startTime)
.endAt(endTime)
.withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInHours(1).withRepeatCount(2000))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("every-hour", "samples")))
.willReturn(TriggerState.COMPLETE);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "every-hour", true);
assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "every-hour"),
entry("description", "Every hour"), entry("type", "simple"), entry("state", TriggerState.COMPLETE),
entry("priority", 20));
assertThat(triggerDetails).contains(entry("startTime", startTime), entry("previousFireTime", previousFireTime),
entry("nextFireTime", nextFireTime), entry("endTime", endTime));
assertThat(triggerDetails).doesNotContainKeys("cron", "dailyTimeInterval", "calendarInterval", "custom");
assertThat(triggerDetails).extractingByKey("simple", nestedMap())
.containsOnly(entry("interval", 3600000L), entry("repeatCount", 2000), entry("timesTriggered", 0));
}
@Test
void quartzTriggerWithDailyTimeIntervalTrigger() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
DailyTimeIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("every-hour-mon-wed", "samples")
.withDescription("Every working hour Mon Wed")
.withPriority(4)
.withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule()
.onDaysOfTheWeek(Calendar.MONDAY, Calendar.WEDNESDAY)
.startingDailyAt(TimeOfDay.hourAndMinuteOfDay(9, 0))
.endingDailyAt(TimeOfDay.hourAndMinuteOfDay(18, 0))
.withInterval(1, IntervalUnit.HOUR))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("every-hour-mon-wed", "samples")))
.willReturn(TriggerState.NORMAL);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "every-hour-mon-wed", true);
assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "every-hour-mon-wed"),
entry("description", "Every working hour Mon Wed"), entry("type", "dailyTimeInterval"),
entry("state", TriggerState.NORMAL), entry("priority", 4));
assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
entry("nextFireTime", nextFireTime));
assertThat(triggerDetails).doesNotContainKeys("cron", "simple", "calendarInterval", "custom");
assertThat(triggerDetails).extractingByKey("dailyTimeInterval", nestedMap())
.containsOnly(entry("interval", 3600000L), entry("startTimeOfDay", LocalTime.of(9, 0)),
entry("endTimeOfDay", LocalTime.of(18, 0)),
entry("daysOfWeek", new LinkedHashSet<>(Arrays.asList(2, 4))), entry("repeatCount", -1),
entry("timesTriggered", 0));
}
@Test
void quartzTriggerWithCalendarTimeIntervalTrigger() throws SchedulerException {
TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("once-a-week", "samples")
.withDescription("Once a week")
.withPriority(8)
.withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule()
.withIntervalInWeeks(1)
.inTimeZone(timeZone)
.preserveHourOfDayAcrossDaylightSavings(true))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("once-a-week", "samples")))
.willReturn(TriggerState.BLOCKED);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "once-a-week", true);
assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "once-a-week"),
entry("description", "Once a week"), entry("type", "calendarInterval"),
entry("state", TriggerState.BLOCKED), entry("priority", 8));
assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
entry("nextFireTime", nextFireTime));
assertThat(triggerDetails).doesNotContainKeys("cron", "simple", "dailyTimeInterval", "custom");
assertThat(triggerDetails).extractingByKey("calendarInterval", nestedMap())
.containsOnly(entry("interval", 604800000L), entry("timeZone", timeZone),
entry("preserveHourOfDayAcrossDaylightSavings", true), entry("skipDayIfHourDoesNotExist", false),
entry("timesTriggered", 0));
}
@Test
void quartzTriggerWithCustomTrigger() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
Trigger trigger = mock(Trigger.class);
given(trigger.getKey()).willReturn(TriggerKey.triggerKey("custom", "samples"));
given(trigger.getPreviousFireTime()).willReturn(previousFireTime);
given(trigger.getNextFireTime()).willReturn(nextFireTime);
given(trigger.getPriority()).willReturn(9);
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("custom", "samples")))
.willReturn(TriggerState.ERROR);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "custom", true);
assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "custom"), entry("type", "custom"),
entry("state", TriggerState.ERROR), entry("priority", 9));
assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
entry("nextFireTime", nextFireTime));
assertThat(triggerDetails).doesNotContainKeys("cron", "simple", "calendarInterval", "dailyTimeInterval");
assertThat(triggerDetails).extractingByKey("custom", nestedMap())
.containsOnly(entry("trigger", trigger.toString()));
}
@Test
void quartzTriggerWithDataMap() throws SchedulerException {
CronTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("3am-every-day", "samples")
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.usingJobData("user", "user")
.usingJobData("password", "secret")
.usingJobData("url", "https://user:secret@example.com")
.build();
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("3am-every-day", "samples")))
.willReturn(TriggerState.NORMAL);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "3am-every-day", true);
assertThat(triggerDetails).extractingByKey("data", nestedMap())
.containsOnly(entry("user", "user"), entry("password", "secret"),
entry("url", "https://user:secret@example.com"));
}
@Test
void quartzTriggerWithDataMapAndShowUnsanitizedFalse() throws SchedulerException {
CronTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("3am-every-day", "samples")
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.usingJobData("user", "user")
.usingJobData("password", "secret")
.usingJobData("url", "https://user:secret@example.com")
.build();
mockTriggers(trigger);
given(this.scheduler.getTriggerState(TriggerKey.triggerKey("3am-every-day", "samples")))
.willReturn(TriggerState.NORMAL);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "3am-every-day", false);
assertThat(triggerDetails).extractingByKey("data", nestedMap())
.containsOnly(entry("user", "******"), entry("password", "******"), entry("url", "******"));
}
@ParameterizedTest(name = "unit {1}")
@MethodSource("intervalUnitParameters")
void canConvertIntervalUnit(int amount, IntervalUnit unit, Duration expectedDuration) throws SchedulerException {
CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity("trigger", "samples")
.withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule().withInterval(amount, unit))
.build();
mockTriggers(trigger);
Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "trigger", true);
assertThat(triggerDetails).extractingByKey("calendarInterval", nestedMap())
.contains(entry("interval", expectedDuration.toMillis()));
}
static Stream<Arguments> intervalUnitParameters() {
return Stream.of(Arguments.of(3, IntervalUnit.DAY, Duration.ofDays(3)),
Arguments.of(2, IntervalUnit.HOUR, Duration.ofHours(2)),
Arguments.of(5, IntervalUnit.MINUTE, Duration.ofMinutes(5)),
Arguments.of(1, IntervalUnit.MONTH, ChronoUnit.MONTHS.getDuration()),
Arguments.of(30, IntervalUnit.SECOND, Duration.ofSeconds(30)),
Arguments.of(1, IntervalUnit.WEEK, ChronoUnit.WEEKS.getDuration()),
Arguments.of(1, IntervalUnit.YEAR, ChronoUnit.YEARS.getDuration()));
}
@Test
void quartzJobWithoutTrigger() throws SchedulerException {
JobDetail job = JobBuilder.newJob(Job.class)
.withIdentity("hello", "samples")
.withDescription("A sample job")
.storeDurably()
.requestRecovery(false)
.build();
mockJobs(job);
QuartzJobDetailsDescriptor jobDetails = this.endpoint.quartzJob("samples", "hello", true);
assertThat(jobDetails).isNotNull();
assertThat(jobDetails.getGroup()).isEqualTo("samples");
assertThat(jobDetails.getName()).isEqualTo("hello");
assertThat(jobDetails.getDescription()).isEqualTo("A sample job");
assertThat(jobDetails.getClassName()).isEqualTo(Job.class.getName());
assertThat(jobDetails.isDurable()).isTrue();
assertThat(jobDetails.isRequestRecovery()).isFalse();
assertThat(jobDetails.getData()).isEmpty();
assertThat(jobDetails.getTriggers()).isEmpty();
}
@Test
void quartzJobWithTrigger() throws SchedulerException {
Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").build();
TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
Trigger trigger = TriggerBuilder.newTrigger()
.withIdentity("3am-every-day", "samples")
.withPriority(4)
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0).inTimeZone(timeZone))
.build();
((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
((OperableTrigger) trigger).setNextFireTime(nextFireTime);
mockJobs(job);
mockTriggers(trigger);
given(this.scheduler.getTriggersOfJob(JobKey.jobKey("hello", "samples")))
.willAnswer((invocation) -> Collections.singletonList(trigger));
QuartzJobDetailsDescriptor jobDetails = this.endpoint.quartzJob("samples", "hello", true);
assertThat(jobDetails).isNotNull();
assertThat(jobDetails.getTriggers()).hasSize(1);
Map<String, Object> triggerDetails = jobDetails.getTriggers().get(0);
assertThat(triggerDetails).containsOnly(entry("group", "samples"), entry("name", "3am-every-day"),
entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime), entry("priority", 4));
}
@Test
void quartzJobOrdersTriggersAccordingToNextFireTime() throws SchedulerException {
JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").build();
mockJobs(job);
Date triggerOneNextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
CronTrigger triggerOne = TriggerBuilder.newTrigger()
.withIdentity("one", "samples")
.withPriority(5)
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.build();
((OperableTrigger) triggerOne).setNextFireTime(triggerOneNextFireTime);
Date triggerTwoNextFireTime = Date.from(Instant.parse("2020-12-01T02:00:00Z"));
CronTrigger triggerTwo = TriggerBuilder.newTrigger()
.withIdentity("two", "samples")
.withPriority(10)
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(2, 0))
.build();
((OperableTrigger) triggerTwo).setNextFireTime(triggerTwoNextFireTime);
mockTriggers(triggerOne, triggerTwo);
given(this.scheduler.getTriggersOfJob(JobKey.jobKey("hello", "samples")))
.willAnswer((invocation) -> Arrays.asList(triggerOne, triggerTwo));
QuartzJobDetailsDescriptor jobDetails = this.endpoint.quartzJob("samples", "hello", true);
assertThat(jobDetails).isNotNull();
assertThat(jobDetails.getTriggers()).hasSize(2);
assertThat(jobDetails.getTriggers().get(0)).containsEntry("name", "two");
assertThat(jobDetails.getTriggers().get(1)).containsEntry("name", "one");
}
@Test
void quartzJobOrdersTriggersAccordingNextFireTimeAndPriority() throws SchedulerException {
JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").build();
mockJobs(job);
Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
CronTrigger triggerOne = TriggerBuilder.newTrigger()
.withIdentity("one", "samples")
.withPriority(3)
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.build();
((OperableTrigger) triggerOne).setNextFireTime(nextFireTime);
CronTrigger triggerTwo = TriggerBuilder.newTrigger()
.withIdentity("two", "samples")
.withPriority(7)
.withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0))
.build();
((OperableTrigger) triggerTwo).setNextFireTime(nextFireTime);
mockTriggers(triggerOne, triggerTwo);
given(this.scheduler.getTriggersOfJob(JobKey.jobKey("hello", "samples")))
.willAnswer((invocation) -> Arrays.asList(triggerOne, triggerTwo));
QuartzJobDetailsDescriptor jobDetails = this.endpoint.quartzJob("samples", "hello", true);
assertThat(jobDetails).isNotNull();
assertThat(jobDetails.getTriggers()).hasSize(2);
assertThat(jobDetails.getTriggers().get(0)).containsEntry("name", "two");
assertThat(jobDetails.getTriggers().get(1)).containsEntry("name", "one");
}
@Test
void quartzJobWithDataMap() throws SchedulerException {
JobDetail job = JobBuilder.newJob(Job.class)
.withIdentity("hello", "samples")
.usingJobData("user", "user")
.usingJobData("password", "secret")
.usingJobData("url", "https://user:secret@example.com")
.build();
mockJobs(job);
QuartzJobDetailsDescriptor jobDetails = this.endpoint.quartzJob("samples", "hello", true);
assertThat(jobDetails).isNotNull();
assertThat(jobDetails.getData()).containsOnly(entry("user", "user"), entry("password", "secret"),
entry("url", "https://user:secret@example.com"));
}
@Test
void quartzJobWithDataMapAndShowUnsanitizedFalse() throws SchedulerException {
JobDetail job = JobBuilder.newJob(Job.class)
.withIdentity("hello", "samples")
.usingJobData("user", "user")
.usingJobData("password", "secret")
.usingJobData("url", "https://user:secret@example.com")
.build();
mockJobs(job);
QuartzJobDetailsDescriptor jobDetails = this.endpoint.quartzJob("samples", "hello", false);
assertThat(jobDetails).isNotNull();
assertThat(jobDetails.getData()).containsOnly(entry("user", "******"), entry("password", "******"),
entry("url", "******"));
}
@Test
void quartzJobShouldBeTriggered() throws SchedulerException {
JobDetail job = JobBuilder.newJob(Job.class)
.withIdentity("hello", "samples")
.withDescription("A sample job")
.storeDurably()
.requestRecovery(false)
.build();
mockJobs(job);
QuartzJobTriggerDescriptor quartzJobTriggerDescriptor = this.endpoint.triggerQuartzJob("samples", "hello");
assertThat(quartzJobTriggerDescriptor).isNotNull();
assertThat(quartzJobTriggerDescriptor.getName()).isEqualTo("hello");
assertThat(quartzJobTriggerDescriptor.getGroup()).isEqualTo("samples");
assertThat(quartzJobTriggerDescriptor.getClassName()).isEqualTo("org.quartz.Job");
assertThat(quartzJobTriggerDescriptor.getTriggerTime()).isCloseTo(Instant.now(), within(5, ChronoUnit.SECONDS));
then(this.scheduler).should().triggerJob(new JobKey("hello", "samples"));
}
@Test
void quartzJobShouldNotBeTriggeredWhenJobDoesNotExist() throws SchedulerException {
QuartzJobTriggerDescriptor quartzJobTriggerDescriptor = this.endpoint.triggerQuartzJob("samples", "hello");
assertThat(quartzJobTriggerDescriptor).isNull();
then(this.scheduler).should(never()).triggerJob(any());
}
private void mockJobs(JobDetail... jobs) throws SchedulerException {
MultiValueMap<String, JobKey> jobKeys = new LinkedMultiValueMap<>();
for (JobDetail jobDetail : jobs) {
JobKey key = jobDetail.getKey();
given(this.scheduler.getJobDetail(key)).willReturn(jobDetail);
jobKeys.add(key.getGroup(), key);
}
given(this.scheduler.getJobGroupNames()).willReturn(new ArrayList<>(jobKeys.keySet()));
for (Entry<String, List<JobKey>> entry : jobKeys.entrySet()) {
given(this.scheduler.getJobKeys(GroupMatcher.jobGroupEquals(entry.getKey())))
.willReturn(new LinkedHashSet<>(entry.getValue()));
}
}
private void mockTriggers(Trigger... triggers) throws SchedulerException {
MultiValueMap<String, TriggerKey> triggerKeys = new LinkedMultiValueMap<>();
for (Trigger trigger : triggers) {
TriggerKey key = trigger.getKey();
given(this.scheduler.getTrigger(key)).willReturn(trigger);
triggerKeys.add(key.getGroup(), key);
}
given(this.scheduler.getTriggerGroupNames()).willReturn(new ArrayList<>(triggerKeys.keySet()));
for (Entry<String, List<TriggerKey>> entry : triggerKeys.entrySet()) {
given(this.scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(entry.getKey())))
.willReturn(new LinkedHashSet<>(entry.getValue()));
}
}
@SuppressWarnings("rawtypes")
private static InstanceOfAssertFactory<Map, MapAssert<String, Object>> nestedMap() {
return InstanceOfAssertFactories.map(String.class, Object.class);
}
}
|
QuartzEndpointTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java
|
{
"start": 1199,
"end": 24070
}
|
class ____ extends SecurityInBasicRestTestCase {
private static final String READ_SECURITY_USER_AUTH_HEADER = "Basic cmVhZF9zZWN1cml0eV91c2VyOnJlYWQtc2VjdXJpdHktcGFzc3dvcmQ=";
private static final String TEST_USER_NO_READ_USERS_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ=";
private static final Set<String> reservedUsers = Set.of(
"elastic",
"kibana",
"kibana_system",
"logstash_system",
"beats_system",
"apm_system",
"remote_monitoring_user"
);
private Request queryUserRequestWithAuth() {
return queryUserRequestWithAuth(false);
}
private Request queryUserRequestWithAuth(boolean withProfileId) {
final Request request = new Request(
randomFrom("POST", "GET"),
"/_security/_query/user" + (withProfileId ? "?with_profile_uid=true" : randomFrom("", "?with_profile_uid=false"))
);
request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER));
return request;
}
public void testQuery() throws IOException {
boolean withProfileId = randomBoolean();
// No users to match yet
assertQuery("", users -> assertThat(users, empty()), withProfileId);
int randomUserCount = createRandomUsers().size();
// An empty request body means search for all users (page size = 10)
assertQuery("", users -> assertThat(users.size(), equalTo(Math.min(randomUserCount, 10))));
// Match all
assertQuery(
String.format("""
{"query":{"match_all":{}},"from":0,"size":%s}""", randomUserCount),
users -> assertThat(users.size(), equalTo(randomUserCount)),
withProfileId
);
// Exists query
String field = randomFrom("username", "full_name", "roles", "enabled");
assertQuery(
String.format("""
{"query":{"exists":{"field":"%s"}},"from":0,"size":%s}""", field, randomUserCount),
users -> assertEquals(users.size(), randomUserCount),
withProfileId
);
// Prefix search
User prefixUser1 = createUser(
"mr-prefix1",
new String[] { "master-of-the-universe", "some-other-role" },
"Prefix1",
"email@something.com",
Map.of(),
true
);
User prefixUser2 = createUser(
"mr-prefix2",
new String[] { "master-of-the-world", "some-other-role" },
"Prefix2",
"email@something.com",
Map.of(),
true
);
// Extract map to be able to assert on profile id (not part of User model)
Map<String, Object> prefixUser1Map;
Map<String, Object> prefixUser2Map;
if (withProfileId) {
prefixUser1Map = userToMap(prefixUser1, doActivateProfile(prefixUser1.principal(), "100%-security-guaranteed"));
prefixUser2Map = userToMap(prefixUser2, doActivateProfile(prefixUser2.principal(), "100%-security-guaranteed"));
assertTrue(prefixUser1Map.containsKey("profile_uid"));
assertTrue(prefixUser2Map.containsKey("profile_uid"));
} else {
prefixUser1Map = userToMap(prefixUser1);
prefixUser2Map = userToMap(prefixUser2);
}
assertQuery("""
{"query":{"bool":{"must":[{"prefix":{"roles":"master-of-the"}}]}},"sort":["username"]}""", returnedUsers -> {
assertThat(returnedUsers, hasSize(2));
assertUser(prefixUser1Map, returnedUsers.get(0));
assertUser(prefixUser2Map, returnedUsers.get(1));
}, withProfileId);
// Wildcard search
assertQuery("""
{ "query": { "wildcard": {"username": "mr-prefix*"} },"sort":["username"]}""", users -> {
assertThat(users.size(), equalTo(2));
assertUser(prefixUser1Map, users.get(0));
assertUser(prefixUser2Map, users.get(1));
}, withProfileId);
// Terms query
assertQuery("""
{"query":{"terms":{"roles":["some-other-role"]}},"sort":["username"]}""", users -> {
assertThat(users.size(), equalTo(2));
assertUser(prefixUser1Map, users.get(0));
assertUser(prefixUser2Map, users.get(1));
}, withProfileId);
// Test other fields
User otherFieldsTestUser = createUser(
"batman-official-user",
new String[] { "bat-cave-admin" },
"Batman",
"batman@hotmail.com",
Map.of(),
true
);
String enabledTerm = "\"enabled\":true";
String fullNameTerm = "\"full_name\":\"batman\"";
String emailTerm = "\"email\":\"batman@hotmail.com\"";
final String term = randomFrom(enabledTerm, fullNameTerm, emailTerm);
assertQuery(
Strings.format("""
{"query":{"term":{%s}},"size":100}""", term),
users -> assertThat(
users.stream().map(u -> u.get(User.Fields.USERNAME.getPreferredName()).toString()).toList(),
hasItem("batman-official-user")
),
withProfileId
);
Map<String, Object> otherFieldsTestUserMap;
if (withProfileId) {
otherFieldsTestUserMap = userToMap(
otherFieldsTestUser,
doActivateProfile(otherFieldsTestUser.principal(), "100%-security-guaranteed")
);
assertTrue(otherFieldsTestUserMap.containsKey("profile_uid"));
} else {
otherFieldsTestUserMap = userToMap(otherFieldsTestUser);
}
// Test complex query
assertQuery("""
{ "query": {"bool": {"must": [
{"wildcard": {"username": "batman-official*"}},
{"term": {"enabled": true}}],"filter": [{"prefix": {"roles": "bat-cave"}}]}}}""", users -> {
assertThat(users.size(), equalTo(1));
assertUser(otherFieldsTestUserMap, users.get(0));
}, withProfileId);
// Search for fields outside the allowlist fails
assertQueryError(400, """
{ "query": { "prefix": {"not_allowed": "ABC"} } }""");
// Search for fields that are not allowed in Query DSL but used internally by the service itself
final String fieldName = randomFrom("type", "password");
assertQueryError(400, Strings.format("""
{ "query": { "term": {"%s": "%s"} } }""", fieldName, randomAlphaOfLengthBetween(3, 8)));
// User without read_security gets 403 trying to search Users
assertQueryError(TEST_USER_NO_READ_USERS_AUTH_HEADER, 403, """
{ "query": { "wildcard": {"name": "*prefix*"} } }""");
// Span term query not supported
assertQueryError(400, """
{"query":{"span_term":{"username": "X"} } }""");
// Fuzzy query not supported
assertQueryError(400, """
{ "query": { "fuzzy": { "username": "X" } } }""");
// Make sure we can't query reserved users
String reservedUsername = getReservedUsernameAndAssertExists();
assertQuery(String.format("""
{"query":{"term":{"username":"%s"}}}""", reservedUsername), users -> assertTrue(users.isEmpty()));
}
public void testPagination() throws IOException {
final List<User> users = createRandomUsers();
final int from = randomIntBetween(0, 3);
final int size = randomIntBetween(2, 5);
final int remaining = users.size() - from;
// Using string only sorting to simplify test
final String sortField = "username";
final List<Map<String, Object>> allUserInfos = new ArrayList<>(remaining);
{
Request request = queryUserRequestWithAuth();
request.setJsonEntity("{\"from\":" + from + ",\"size\":" + size + ",\"sort\":[\"" + sortField + "\"]}");
allUserInfos.addAll(collectUsers(request, users.size()));
}
// first batch should be a full page
assertThat(allUserInfos.size(), equalTo(size));
while (allUserInfos.size() < remaining) {
final Request request = queryUserRequestWithAuth();
final List<Object> sortValues = extractSortValues(allUserInfos.get(allUserInfos.size() - 1));
request.setJsonEntity(Strings.format("""
{"size":%s,"sort":["%s"],"search_after":["%s"]}
""", size, sortField, sortValues.get(0)));
final List<Map<String, Object>> userInfoPage = collectUsers(request, users.size());
if (userInfoPage.isEmpty() && allUserInfos.size() < remaining) {
fail("fail to retrieve all Users, expect [" + remaining + "], got [" + allUserInfos + "]");
}
allUserInfos.addAll(userInfoPage);
// Before all users are retrieved, each page should be a full page
if (allUserInfos.size() < remaining) {
assertThat(userInfoPage.size(), equalTo(size));
}
}
// Assert sort values match the field of User information
assertThat(
allUserInfos.stream().map(m -> m.get(sortField)).toList(),
equalTo(allUserInfos.stream().map(m -> extractSortValues(m).get(0)).toList())
);
// Assert that all users match the created users and that they're sorted correctly
assertUsers(users, allUserInfos, sortField, from);
// size can be zero, but total should still reflect the number of users matched
final Request request = queryUserRequestWithAuth(false);
request.setJsonEntity("{\"size\":0}");
final Response response = client().performRequest(request);
assertOK(response);
final Map<String, Object> responseMap = responseAsMap(response);
assertThat(responseMap.get("total"), equalTo(users.size()));
assertThat(responseMap.get("count"), equalTo(0));
}
@SuppressWarnings("unchecked")
public void testSort() throws IOException {
final List<User> testUsers = List.of(
createUser("a", new String[] { "4", "5", "6" }),
createUser("b", new String[] { "5", "6" }),
createUser("c", new String[] { "7", "8" })
);
assertQuery("""
{"sort":[{"username":{"order":"desc"}}]}""", users -> {
assertThat(users.size(), equalTo(3));
for (int i = 2, j = 0; i >= 0; i--, j++) {
assertUser(testUsers.get(j), users.get(i));
assertThat(users.get(i).get("username"), equalTo(((List<String>) users.get(i).get("_sort")).get(0)));
}
});
assertQuery("""
{"sort":[{"username":{"order":"asc"}}]}""", users -> {
assertThat(users.size(), equalTo(3));
for (int i = 0; i <= 2; i++) {
assertUser(testUsers.get(i), users.get(i));
assertThat(users.get(i).get("username"), equalTo(((List<String>) users.get(i).get("_sort")).get(0)));
}
});
assertQuery("""
{"sort":[{"roles":{"order":"asc"}}]}""", users -> {
assertThat(users.size(), equalTo(3));
for (int i = 0; i <= 2; i++) {
assertUser(testUsers.get(i), users.get(i));
// Only first element of array is used for sorting
assertThat(((List<String>) users.get(i).get("roles")).get(0), equalTo(((List<String>) users.get(i).get("_sort")).get(0)));
}
});
// Make sure sorting on _doc works
assertQuery("""
{"sort":["_doc"]}""", users -> assertThat(users.size(), equalTo(3)));
// Make sure multi-field sorting works
assertQuery("""
{"sort":[{"username":{"order":"asc"}}, {"roles":{"order":"asc"}}]}""", users -> {
assertThat(users.size(), equalTo(3));
for (int i = 0; i <= 2; i++) {
assertUser(testUsers.get(i), users.get(i));
assertThat(users.get(i).get("username"), equalTo(((List<String>) users.get(i).get("_sort")).get(0)));
assertThat(((List<String>) users.get(i).get("roles")).get(0), equalTo(((List<String>) users.get(i).get("_sort")).get(1)));
}
});
final String invalidFieldName = randomFrom("doc_type", "invalid", "password");
assertQueryError(400, "{\"sort\":[\"" + invalidFieldName + "\"]}");
final String invalidSortName = randomFrom("email", "full_name");
assertQueryError(
READ_SECURITY_USER_AUTH_HEADER,
400,
Strings.format("{\"sort\":[\"%s\"]}", invalidSortName),
Strings.format("sorting is not supported for field [%s]", invalidSortName)
);
}
private String getReservedUsernameAndAssertExists() throws IOException {
String username = randomFrom(reservedUsers);
final Request request = new Request("GET", "/_security/user");
if (randomBoolean()) {
// Update the user to create it in the security index
Request putUserRequest = new Request("PUT", "/_security/user/" + username);
putUserRequest.setJsonEntity("{\"enabled\": true}");
}
request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER));
final Response response = client().performRequest(request);
assertOK(response);
final Map<String, Object> responseMap = responseAsMap(response);
assertNotNull(responseMap.get(username));
return username;
}
@SuppressWarnings("unchecked")
private List<Object> extractSortValues(Map<String, Object> userInfo) {
return (List<Object>) userInfo.get("_sort");
}
private List<Map<String, Object>> collectUsers(Request request, int total) throws IOException {
final Response response = client().performRequest(request);
assertOK(response);
final Map<String, Object> responseMap = responseAsMap(response);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> userInfos = (List<Map<String, Object>>) responseMap.get("users");
assertThat(responseMap.get("total"), equalTo(total));
assertThat(responseMap.get("count"), equalTo(userInfos.size()));
return userInfos;
}
private void assertQueryError(int statusCode, String body) {
assertQueryError(READ_SECURITY_USER_AUTH_HEADER, statusCode, body);
}
private void assertQueryError(String authHeader, int statusCode, String body) {
assertQueryError(authHeader, statusCode, body, null);
}
private void assertQueryError(String authHeader, int statusCode, String body, String errorMessage) {
final Request request = new Request(randomFrom("GET", "POST"), "/_security/_query/user");
request.setJsonEntity(body);
request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, authHeader));
final ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request));
assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(statusCode));
if (errorMessage != null) {
assertTrue(responseException.getMessage().contains(errorMessage));
}
}
private void assertQuery(String body, Consumer<List<Map<String, Object>>> userVerifier) throws IOException {
assertQuery(body, userVerifier, false);
}
private void assertQuery(String body, Consumer<List<Map<String, Object>>> userVerifier, boolean withProfileId) throws IOException {
final Request request = queryUserRequestWithAuth(withProfileId);
request.setJsonEntity(body);
final Response response = client().performRequest(request);
assertOK(response);
final Map<String, Object> responseMap = responseAsMap(response);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> users = (List<Map<String, Object>>) responseMap.get("users");
userVerifier.accept(users);
}
private void assertUser(User expectedUser, Map<String, Object> actualUser) {
assertUser(userToMap(expectedUser), actualUser);
}
@SuppressWarnings("unchecked")
private void assertUser(Map<String, Object> expectedUser, Map<String, Object> actualUser) {
assertEquals(expectedUser.get(User.Fields.USERNAME.getPreferredName()), actualUser.get(User.Fields.USERNAME.getPreferredName()));
assertArrayEquals(
((List<String>) expectedUser.get(User.Fields.ROLES.getPreferredName())).toArray(),
((List<String>) actualUser.get(User.Fields.ROLES.getPreferredName())).toArray()
);
assertEquals(expectedUser.getOrDefault("profile_uid", null), actualUser.getOrDefault("profile_uid", null));
assertEquals(expectedUser.get(User.Fields.FULL_NAME.getPreferredName()), actualUser.get(User.Fields.FULL_NAME.getPreferredName()));
assertEquals(expectedUser.get(User.Fields.EMAIL.getPreferredName()), actualUser.get(User.Fields.EMAIL.getPreferredName()));
assertEquals(expectedUser.get(User.Fields.METADATA.getPreferredName()), actualUser.get(User.Fields.METADATA.getPreferredName()));
assertEquals(expectedUser.get(User.Fields.ENABLED.getPreferredName()), actualUser.get(User.Fields.ENABLED.getPreferredName()));
}
private Map<String, Object> userToMap(User user) {
return userToMap(user, null);
}
private Map<String, Object> userToMap(User user, @Nullable String profileId) {
Map<String, Object> userMap = new HashMap<>();
userMap.put(User.Fields.USERNAME.getPreferredName(), user.principal());
userMap.put(User.Fields.ROLES.getPreferredName(), Arrays.stream(user.roles()).toList());
userMap.put(User.Fields.FULL_NAME.getPreferredName(), user.fullName());
userMap.put(User.Fields.EMAIL.getPreferredName(), user.email());
userMap.put(User.Fields.METADATA.getPreferredName(), user.metadata());
userMap.put(User.Fields.ENABLED.getPreferredName(), user.enabled());
if (profileId != null) {
userMap.put("profile_uid", profileId);
}
return userMap;
}
private void assertUsers(List<User> expectedUsers, List<Map<String, Object>> actualUsers, String sortField, int from) {
assertEquals(expectedUsers.size() - from, actualUsers.size());
List<Map<String, Object>> sortedExpectedUsers = expectedUsers.stream()
.map(this::userToMap)
.sorted(Comparator.comparing(user -> user.get(sortField).toString()))
.toList();
for (int i = from; i < sortedExpectedUsers.size(); i++) {
assertUser(sortedExpectedUsers.get(i), actualUsers.get(i - from));
}
}
public static Map<String, Object> randomUserMetadata() {
return ESTestCase.randomFrom(
Map.of(
"employee_id",
ESTestCase.randomAlphaOfLength(5),
"number",
1,
"numbers",
List.of(1, 3, 5),
"extra",
Map.of("favorite pizza", "margherita", "age", 42)
),
Map.of(ESTestCase.randomAlphaOfLengthBetween(3, 8), ESTestCase.randomAlphaOfLengthBetween(3, 8)),
Map.of(),
null
);
}
private String doActivateProfile(String username, String password) {
final Request activateProfileRequest = new Request("POST", "_security/profile/_activate");
activateProfileRequest.setJsonEntity(org.elasticsearch.common.Strings.format("""
{
"grant_type": "password",
"username": "%s",
"password": "%s"
}""", username, password));
final Response activateProfileResponse;
try {
activateProfileResponse = adminClient().performRequest(activateProfileRequest);
assertOK(activateProfileResponse);
return responseAsMap(activateProfileResponse).get("uid").toString();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private List<User> createRandomUsers() throws IOException {
int randomUserCount = randomIntBetween(8, 15);
final List<User> users = new ArrayList<>(randomUserCount);
for (int i = 0; i < randomUserCount; i++) {
users.add(
createUser(
randomValueOtherThanMany(reservedUsers::contains, () -> randomAlphaOfLengthBetween(3, 8)) + "-" + i,
randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)),
randomAlphaOfLengthBetween(3, 8),
randomAlphaOfLengthBetween(3, 8),
randomUserMetadata(),
randomBoolean()
)
);
}
return users;
}
private User createUser(String userName, String[] roles) throws IOException {
return createUser(
userName,
roles,
randomAlphaOfLengthBetween(3, 8),
randomAlphaOfLengthBetween(3, 8),
randomUserMetadata(),
randomBoolean()
);
}
private User createUser(String userName, String[] roles, String fullName, String email, Map<String, Object> metadata, boolean enabled)
throws IOException {
final Request request = new Request("POST", "/_security/user/" + userName);
BytesReference source = BytesReference.bytes(
jsonBuilder().map(
Map.of(
User.Fields.USERNAME.getPreferredName(),
userName,
User.Fields.ROLES.getPreferredName(),
roles,
User.Fields.FULL_NAME.getPreferredName(),
fullName,
User.Fields.EMAIL.getPreferredName(),
email,
User.Fields.METADATA.getPreferredName(),
metadata == null ? Map.of() : metadata,
User.Fields.PASSWORD.getPreferredName(),
"100%-security-guaranteed",
User.Fields.ENABLED.getPreferredName(),
enabled
)
)
);
request.setJsonEntity(source.utf8ToString());
Response response = adminClient().performRequest(request);
assertOK(response);
assertTrue((boolean) responseAsMap(response).get("created"));
return new User(userName, roles, fullName, email, metadata, enabled);
}
}
|
QueryUserIT
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/utils/FuzzyGroupKeyPattern.java
|
{
"start": 7851,
"end": 9420
}
|
class ____ {
String groupKey;
boolean exist;
/**
* Constructs a new ConfigState instance with the given group key and existence flag.
*
* @param groupKey The group key associated with the configuration.
* @param exist {@code true} if the configuration exists, {@code false} otherwise.
*/
public GroupKeyState(String groupKey, boolean exist) {
this.groupKey = groupKey;
this.exist = exist;
}
/**
* Retrieves the group key associated with the configuration.
*
* @return The group key.
*/
public String getGroupKey() {
return groupKey;
}
/**
* Sets the group key associated with the configuration.
*
* @param groupKey The group key to set.
*/
public void setGroupKey(String groupKey) {
this.groupKey = groupKey;
}
/**
* Checks whether the configuration exists or not.
*
* @return {@code true} if the configuration exists, {@code false} otherwise.
*/
public boolean isExist() {
return exist;
}
/**
* Sets the existence flag of the configuration.
*
* @param exist {@code true} if the configuration exists, {@code false} otherwise.
*/
public void setExist(boolean exist) {
this.exist = exist;
}
}
}
|
GroupKeyState
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/impl/pb/GetReservationHomeSubClusterResponsePBImpl.java
|
{
"start": 1740,
"end": 4326
}
|
class ____
extends GetReservationHomeSubClusterResponse {
private GetReservationHomeSubClusterResponseProto proto =
GetReservationHomeSubClusterResponseProto.getDefaultInstance();
private GetReservationHomeSubClusterResponseProto.Builder builder = null;
private boolean viaProto = false;
public GetReservationHomeSubClusterResponsePBImpl() {
builder = GetReservationHomeSubClusterResponseProto.newBuilder();
}
public GetReservationHomeSubClusterResponsePBImpl(
GetReservationHomeSubClusterResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public GetReservationHomeSubClusterResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToProto() {
if (viaProto) {
maybeInitBuilder();
}
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetReservationHomeSubClusterResponseProto.newBuilder(proto);
}
viaProto = false;
}
private void mergeLocalToBuilder() {
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
@Override
public ReservationHomeSubCluster getReservationHomeSubCluster() {
GetReservationHomeSubClusterResponseProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasAppSubclusterMap()) {
return null;
}
return convertFromProtoFormat(p.getAppSubclusterMap());
}
@Override
public void setReservationHomeSubCluster(
ReservationHomeSubCluster reservationInfo) {
maybeInitBuilder();
if (reservationInfo == null) {
builder.clearAppSubclusterMap();
return;
}
builder.setAppSubclusterMap(convertToProtoFormat(reservationInfo));
}
private ReservationHomeSubCluster convertFromProtoFormat(
ReservationHomeSubClusterProto sc) {
return new ReservationHomeSubClusterPBImpl(sc);
}
private ReservationHomeSubClusterProto convertToProtoFormat(
ReservationHomeSubCluster sc) {
return ((ReservationHomeSubClusterPBImpl) sc).getProto();
}
}
|
GetReservationHomeSubClusterResponsePBImpl
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/event/AnnotationDrivenEventListenerTests.java
|
{
"start": 3450,
"end": 26490
}
|
class ____ {
private ConfigurableApplicationContext context;
private EventCollector eventCollector;
private CountDownLatch countDownLatch; // 1 call by default
@AfterEach
void closeContext() {
if (this.context != null) {
this.context.close();
}
}
@Test
void simpleEventJavaConfig() {
load(TestEventListener.class);
TestEvent event = new TestEvent(this, "test");
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
this.eventCollector.clear();
TestEvent otherEvent = new TestEvent(this, Integer.valueOf(1));
this.context.publishEvent(otherEvent);
this.eventCollector.assertEvent(listener, otherEvent);
this.eventCollector.assertTotalEventsCount(1);
context.getBean(ApplicationEventMulticaster.class).removeApplicationListeners(l ->
l instanceof SmartApplicationListener sal && sal.getListenerId().contains("TestEvent"));
this.eventCollector.clear();
this.context.publishEvent(event);
this.eventCollector.assertNoEventReceived(listener);
}
@Test
void simpleEventXmlConfig() {
this.context = new ClassPathXmlApplicationContext(
"org/springframework/context/event/simple-event-configuration.xml");
TestEvent event = new TestEvent(this, "test");
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector = getEventCollector(this.context);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
context.getBean(ApplicationEventMulticaster.class).removeApplicationListeners(l ->
l instanceof SmartApplicationListener sal && sal.getListenerId().contains("TestEvent"));
this.eventCollector.clear();
this.context.publishEvent(event);
this.eventCollector.assertNoEventReceived(listener);
}
@Test
void metaAnnotationIsDiscovered() {
load(MetaAnnotationListenerTestBean.class);
MetaAnnotationListenerTestBean bean = this.context.getBean(MetaAnnotationListenerTestBean.class);
this.eventCollector.assertNoEventReceived(bean);
TestEvent event = new TestEvent();
this.context.publishEvent(event);
this.eventCollector.assertEvent(bean, event);
this.eventCollector.assertTotalEventsCount(1);
context.getBean(ApplicationEventMulticaster.class).removeApplicationListeners(l ->
l instanceof SmartApplicationListener sal && sal.getListenerId().equals("foo"));
this.eventCollector.clear();
this.context.publishEvent(event);
this.eventCollector.assertNoEventReceived(bean);
}
@Test
void contextEventsAreReceived() {
load(ContextEventListener.class);
ContextEventListener listener = this.context.getBean(ContextEventListener.class);
List<Object> events = this.eventCollector.getEvents(listener);
assertThat(events).as("Wrong number of initial context events").hasSize(1);
assertThat(events.get(0).getClass()).isEqualTo(ContextRefreshedEvent.class);
this.context.start();
List<Object> eventsAfterStart = this.eventCollector.getEvents(listener);
assertThat(eventsAfterStart).as("Wrong number of context events on start").hasSize(2);
assertThat(eventsAfterStart.get(1).getClass()).isEqualTo(ContextStartedEvent.class);
this.eventCollector.assertTotalEventsCount(2);
this.context.stop();
List<Object> eventsAfterStop = this.eventCollector.getEvents(listener);
assertThat(eventsAfterStop).as("Wrong number of context events on stop").hasSize(3);
assertThat(eventsAfterStop.get(2).getClass()).isEqualTo(ContextStoppedEvent.class);
this.eventCollector.assertTotalEventsCount(3);
this.context.close();
List<Object> eventsAfterClose = this.eventCollector.getEvents(listener);
assertThat(eventsAfterClose).as("Wrong number of context events on close").hasSize(4);
assertThat(eventsAfterClose.get(3).getClass()).isEqualTo(ContextClosedEvent.class);
this.eventCollector.assertTotalEventsCount(4);
// Further events are supposed to be ignored after context close
this.context.publishEvent(new ContextClosedEvent(this.context));
this.eventCollector.assertTotalEventsCount(4);
}
@Test
void methodSignatureNoEvent() {
@SuppressWarnings("resource")
AnnotationConfigApplicationContext failingContext =
new AnnotationConfigApplicationContext();
failingContext.register(BasicConfiguration.class,
InvalidMethodSignatureEventListener.class);
assertThatExceptionOfType(BeanInitializationException.class).isThrownBy(failingContext::refresh)
.withMessageContaining(InvalidMethodSignatureEventListener.class.getName())
.withMessageContaining("cannotBeCalled");
}
@Test
void simpleReply() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, "dummy");
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, new TestEvent(replyEventListener, event.getId(), "dummy")); // reply
this.eventCollector.assertTotalEventsCount(2);
}
@Test
void nullReplyIgnored() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, null); // No response
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void arrayReply() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, new String[]{"first", "second"});
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "first", "second"); // reply
this.eventCollector.assertTotalEventsCount(3);
}
@Test
void collectionReply() {
load(TestEventListener.class, ReplyEventListener.class);
Set<Object> replies = new LinkedHashSet<>();
replies.add("first");
replies.add(4L);
replies.add("third");
AnotherTestEvent event = new AnotherTestEvent(this, replies);
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "first", "third"); // reply (no listener for 4L)
this.eventCollector.assertTotalEventsCount(3);
}
@Test
void collectionReplyNullValue() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, Arrays.asList(null, "test"));
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "test");
this.eventCollector.assertTotalEventsCount(2);
}
@Test
void completableFutureReply() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, CompletableFuture.completedFuture("dummy"));
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "dummy"); // reply
this.eventCollector.assertTotalEventsCount(2);
}
@Test
void monoReply() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, Mono.just("dummy"));
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "dummy"); // reply
this.eventCollector.assertTotalEventsCount(2);
}
@Test
void fluxReply() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, Flux.just("dummy1", "dummy2"));
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "dummy1", "dummy2"); // reply
this.eventCollector.assertTotalEventsCount(3);
}
@Test
void eventListenerWorksWithSimpleInterfaceProxy() {
load(ScopedProxyTestBean.class);
SimpleService proxy = this.context.getBean(SimpleService.class);
assertThat(proxy).as("bean should be a proxy").isInstanceOf(Advised.class);
this.eventCollector.assertNoEventReceived(proxy.getId());
this.context.publishEvent(new ContextRefreshedEvent(this.context));
this.eventCollector.assertNoEventReceived(proxy.getId());
TestEvent event = new TestEvent();
this.context.publishEvent(event);
this.eventCollector.assertEvent(proxy.getId(), event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void eventListenerWorksWithAnnotatedInterfaceProxy() {
load(AnnotatedProxyTestBean.class);
AnnotatedSimpleService proxy = this.context.getBean(AnnotatedSimpleService.class);
assertThat(proxy).as("bean should be a proxy").isInstanceOf(Advised.class);
this.eventCollector.assertNoEventReceived(proxy.getId());
this.context.publishEvent(new ContextRefreshedEvent(this.context));
this.eventCollector.assertNoEventReceived(proxy.getId());
TestEvent event = new TestEvent();
this.context.publishEvent(event);
this.eventCollector.assertEvent(proxy.getId(), event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void eventListenerWorksWithCglibProxy() {
load(CglibProxyTestBean.class);
CglibProxyTestBean proxy = this.context.getBean(CglibProxyTestBean.class);
assertThat(AopUtils.isCglibProxy(proxy)).as("bean should be a cglib proxy").isTrue();
this.eventCollector.assertNoEventReceived(proxy.getId());
this.context.publishEvent(new ContextRefreshedEvent(this.context));
this.eventCollector.assertNoEventReceived(proxy.getId());
TestEvent event = new TestEvent();
this.context.publishEvent(event);
this.eventCollector.assertEvent(proxy.getId(), event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void privateMethodOnCglibProxyFails() {
assertThatExceptionOfType(BeanInitializationException.class).isThrownBy(() ->
load(CglibProxyWithPrivateMethod.class))
.withCauseInstanceOf(IllegalStateException.class);
}
@Test
void eventListenerWorksWithCustomScope() {
load(CustomScopeTestBean.class);
CustomScope customScope = new CustomScope();
this.context.getBeanFactory().registerScope("custom", customScope);
CustomScopeTestBean proxy = this.context.getBean(CustomScopeTestBean.class);
assertThat(AopUtils.isCglibProxy(proxy)).as("bean should be a cglib proxy").isTrue();
this.eventCollector.assertNoEventReceived(proxy.getId());
this.context.publishEvent(new ContextRefreshedEvent(this.context));
this.eventCollector.assertNoEventReceived(proxy.getId());
customScope.active = false;
this.context.publishEvent(new ContextRefreshedEvent(this.context));
customScope.active = true;
this.eventCollector.assertNoEventReceived(proxy.getId());
TestEvent event = new TestEvent();
this.context.publishEvent(event);
this.eventCollector.assertEvent(proxy.getId(), event);
this.eventCollector.assertTotalEventsCount(1);
customScope.active = false;
assertThatExceptionOfType(BeanCreationException.class).isThrownBy(() ->
this.context.publishEvent(new TestEvent()))
.withCauseInstanceOf(IllegalStateException.class);
}
@Test
void asyncProcessingApplied() throws InterruptedException {
loadAsync(AsyncEventListener.class);
String threadName = Thread.currentThread().getName();
AnotherTestEvent event = new AnotherTestEvent(this, threadName);
AsyncEventListener listener = this.context.getBean(AsyncEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.countDownLatch.await(2, TimeUnit.SECONDS);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void asyncProcessingAppliedWithInterfaceProxy() throws InterruptedException {
doLoad(AsyncConfigurationWithInterfaces.class, SimpleProxyTestBean.class);
String threadName = Thread.currentThread().getName();
AnotherTestEvent event = new AnotherTestEvent(this, threadName);
SimpleService listener = this.context.getBean(SimpleService.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.countDownLatch.await(2, TimeUnit.SECONDS);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void asyncProcessingAppliedWithScopedProxy() throws InterruptedException {
doLoad(AsyncConfigurationWithInterfaces.class, ScopedProxyTestBean.class);
String threadName = Thread.currentThread().getName();
AnotherTestEvent event = new AnotherTestEvent(this, threadName);
SimpleService listener = this.context.getBean(SimpleService.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.countDownLatch.await(2, TimeUnit.SECONDS);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void exceptionPropagated() {
load(ExceptionEventListener.class);
TestEvent event = new TestEvent(this, "fail");
ExceptionEventListener listener = this.context.getBean(ExceptionEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
assertThatIllegalStateException().isThrownBy(() ->
this.context.publishEvent(event))
.withMessage("Test exception");
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void exceptionNotPropagatedWithAsync() throws InterruptedException {
loadAsync(ExceptionEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, "fail");
ExceptionEventListener listener = this.context.getBean(ExceptionEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.countDownLatch.await(2, TimeUnit.SECONDS);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void listenerWithSimplePayload() {
load(TestEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent("test");
this.eventCollector.assertEvent(listener, "test");
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void listenerWithNonMatchingPayload() {
load(TestEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(123L);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(0);
}
@Test
void replyWithPayload() {
load(TestEventListener.class, ReplyEventListener.class);
AnotherTestEvent event = new AnotherTestEvent(this, "String");
ReplyEventListener replyEventListener = this.context.getBean(ReplyEventListener.class);
TestEventListener listener = this.context.getBean(TestEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertNoEventReceived(replyEventListener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(replyEventListener, event);
this.eventCollector.assertEvent(listener, "String"); // reply
this.eventCollector.assertTotalEventsCount(2);
}
@Test
void listenerWithGenericApplicationEvent() {
load(GenericEventListener.class);
GenericEventListener listener = this.context.getBean(GenericEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent("TEST");
this.eventCollector.assertEvent(listener, "TEST");
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void listenerWithResolvableTypeEvent() {
load(ResolvableTypeEventListener.class);
ResolvableTypeEventListener listener = this.context.getBean(ResolvableTypeEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
GenericEventPojo<String> event = new GenericEventPojo<>("TEST");
this.context.publishEvent(event);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void listenerWithResolvableTypeEventWrongGeneric() {
load(ResolvableTypeEventListener.class);
ResolvableTypeEventListener listener = this.context.getBean(ResolvableTypeEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
GenericEventPojo<Long> event = new GenericEventPojo<>(123L);
this.context.publishEvent(event);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(0);
}
@Test
void conditionMatch() {
validateConditionMatch(ConditionalEventListener.class);
}
@Test
void conditionMatchWithProxy() {
validateConditionMatch(ConditionalEventListener.class, MethodValidationPostProcessor.class);
}
private void validateConditionMatch(Class<?>... classes) {
long timestamp = System.currentTimeMillis();
load(classes);
TestEvent event = new TestEvent(this, "OK");
ConditionalEventInterface listener = this.context.getBean(ConditionalEventInterface.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.eventCollector.assertEvent(listener, event);
this.eventCollector.assertTotalEventsCount(1);
this.context.publishEvent("OK");
this.eventCollector.assertEvent(listener, event, "OK");
this.eventCollector.assertTotalEventsCount(2);
this.context.publishEvent("NOT OK");
this.eventCollector.assertTotalEventsCount(2);
this.context.publishEvent(timestamp);
this.eventCollector.assertEvent(listener, event, "OK", timestamp);
this.eventCollector.assertTotalEventsCount(3);
this.context.publishEvent(42d);
this.eventCollector.assertEvent(listener, event, "OK", timestamp, 42d);
this.eventCollector.assertTotalEventsCount(4);
}
@Test
void conditionDoesNotMatch() {
long maxLong = Long.MAX_VALUE;
load(ConditionalEventListener.class);
TestEvent event = new TestEvent(this, "KO");
TestEventListener listener = this.context.getBean(ConditionalEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent(event);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(0);
this.context.publishEvent("KO");
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(0);
this.context.publishEvent(maxLong);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(0);
this.context.publishEvent(24d);
this.eventCollector.assertNoEventReceived(listener);
this.eventCollector.assertTotalEventsCount(0);
}
@Test
void orderedListeners() {
load(OrderedTestListener.class);
OrderedTestListener listener = this.context.getBean(OrderedTestListener.class);
assertThat(listener.order).isEmpty();
this.context.publishEvent("whatever");
assertThat(listener.order).contains("first", "second", "third");
}
@Test
void publicSubclassWithInheritedEventListener() {
load(PublicSubclassWithInheritedEventListener.class);
TestEventListener listener = this.context.getBean(PublicSubclassWithInheritedEventListener.class);
this.eventCollector.assertNoEventReceived(listener);
this.context.publishEvent("test");
this.eventCollector.assertEvent(listener, "test");
this.eventCollector.assertTotalEventsCount(1);
}
@Test
void missingListenerBeanIgnored() {
load(MissingEventListener.class);
context.getBean(UseMissingEventListener.class);
context.publishEvent(new TestEvent(this));
}
private void load(Class<?>... classes) {
List<Class<?>> allClasses = new ArrayList<>();
allClasses.add(BasicConfiguration.class);
allClasses.addAll(Arrays.asList(classes));
doLoad(allClasses.toArray(new Class<?>[0]));
}
private void loadAsync(Class<?>... classes) {
List<Class<?>> allClasses = new ArrayList<>();
allClasses.add(AsyncConfiguration.class);
allClasses.addAll(Arrays.asList(classes));
doLoad(allClasses.toArray(new Class<?>[0]));
}
private void doLoad(Class<?>... classes) {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(classes);
this.eventCollector = ctx.getBean(EventCollector.class);
this.countDownLatch = ctx.getBean(CountDownLatch.class);
this.context = ctx;
}
private EventCollector getEventCollector(ConfigurableApplicationContext context) {
return context.getBean(EventCollector.class);
}
@Configuration
static
|
AnnotationDrivenEventListenerTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/test/java/org/springframework/boot/test/context/ImportsContextCustomizerTests.java
|
{
"start": 3951,
"end": 4060
}
|
class ____ {
}
@Metadata(d2 = "bar")
@Import(TestImportSelector.class)
static
|
FirstKotlinAnnotatedTestClass
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/BalancerParameters.java
|
{
"start": 5228,
"end": 7978
}
|
class ____ {
// Defaults
private BalancingPolicy policy = BalancingPolicy.Node.INSTANCE;
private double threshold = 10.0;
private int maxIdleIteration =
NameNodeConnector.DEFAULT_MAX_IDLE_ITERATIONS;
private Set<String> excludedNodes = Collections.<String> emptySet();
private Set<String> includedNodes = Collections.<String> emptySet();
private Set<String> sourceNodes = Collections.<String> emptySet();
private Set<String> excludedSourceNodes = Collections.<String> emptySet();
private Set<String> targetNodes = Collections.<String> emptySet();
private Set<String> excludedTargetNodes = Collections.<String> emptySet();
private Set<String> blockpools = Collections.<String> emptySet();
private boolean runDuringUpgrade = false;
private boolean runAsService = false;
private boolean sortTopNodes = false;
private int limitOverUtilizedNum = Integer.MAX_VALUE;
private long hotBlockTimeInterval = 0;
Builder() {
}
Builder setBalancingPolicy(BalancingPolicy p) {
this.policy = p;
return this;
}
Builder setThreshold(double t) {
this.threshold = t;
return this;
}
Builder setMaxIdleIteration(int m) {
this.maxIdleIteration = m;
return this;
}
Builder setHotBlockTimeInterval(long t) {
this.hotBlockTimeInterval = t;
return this;
}
Builder setExcludedNodes(Set<String> nodes) {
this.excludedNodes = nodes;
return this;
}
Builder setIncludedNodes(Set<String> nodes) {
this.includedNodes = nodes;
return this;
}
Builder setSourceNodes(Set<String> nodes) {
this.sourceNodes = nodes;
return this;
}
Builder setExcludedSourceNodes(Set<String> nodes) {
this.excludedSourceNodes = nodes;
return this;
}
Builder setTargetNodes(Set<String> nodes) {
this.targetNodes = nodes;
return this;
}
Builder setExcludedTargetNodes(Set<String> nodes) {
this.excludedTargetNodes = nodes;
return this;
}
Builder setBlockpools(Set<String> pools) {
this.blockpools = pools;
return this;
}
Builder setRunDuringUpgrade(boolean run) {
this.runDuringUpgrade = run;
return this;
}
Builder setRunAsService(boolean asService) {
this.runAsService = asService;
return this;
}
Builder setSortTopNodes(boolean shouldSortTopNodes) {
this.sortTopNodes = shouldSortTopNodes;
return this;
}
Builder setLimitOverUtilizedNum(int overUtilizedNum) {
this.limitOverUtilizedNum = overUtilizedNum;
return this;
}
BalancerParameters build() {
return new BalancerParameters(this);
}
}
}
|
Builder
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/LiteProtoToString.java
|
{
"start": 2043,
"end": 4773
}
|
class ____ extends AbstractToString {
private static final String LITE_ENUM_MESSAGE =
"toString() on lite proto enums will generate different representations of the value from"
+ " development and optimized builds. Consider using #getNumber if you only need a"
+ " serialized representation of the value, or #name if you really need the name."
+ "";
private static final TypePredicate IS_LITE_PROTO =
allOf(
isDescendantOf("com.google.protobuf.MessageLite"),
not(isDescendantOf("com.google.protobuf.Message")),
not(isExactType("com.google.protobuf.UnknownFieldSet")));
private static final TypePredicate IS_LITE_ENUM =
allOf(
isDescendantOf("com.google.protobuf.Internal.EnumLite"),
not(isDescendantOf("com.google.protobuf.ProtocolMessageEnum")),
not(isDescendantOf("com.google.protobuf.Descriptors.EnumValueDescriptor")),
not(isDescendantOf("com.google.protobuf.AbstractMessageLite.InternalOneOfEnum")));
private static final ImmutableSet<String> METHODS_STRIPPED_BY_OPTIMIZER =
ImmutableSet.<String>builder()
.add("atVerbose", "atFine", "atFiner", "atFinest", "atDebug", "atConfig", "atInfo")
.add("v", "d", "i")
.build();
@Inject
LiteProtoToString(ErrorProneFlags flags) {
super(flags);
}
@Override
protected TypePredicate typePredicate() {
return LiteProtoToString::matches;
}
private static boolean matches(Type type, VisitorState state) {
if (state.errorProneOptions().isTestOnlyTarget()) {
return false;
}
if (isStrippedLogMessage(state)) {
return false;
}
return IS_LITE_PROTO.apply(type, state) || IS_LITE_ENUM.apply(type, state);
}
private static boolean isStrippedLogMessage(VisitorState state) {
return stream(state.getPath()).anyMatch(LiteProtoToString::isStrippedLogMessage);
}
private static boolean isStrippedLogMessage(Tree tree) {
for (; tree instanceof MethodInvocationTree; tree = getReceiver((MethodInvocationTree) tree)) {
if (METHODS_STRIPPED_BY_OPTIMIZER.contains(getSymbol(tree).getSimpleName().toString())) {
return true;
}
}
return false;
}
@Override
protected Optional<String> descriptionMessageForDefaultMatch(Type type, VisitorState state) {
return Optional.of(IS_LITE_ENUM.apply(type, state) ? LITE_ENUM_MESSAGE : message());
}
@Override
protected Optional<Fix> implicitToStringFix(ExpressionTree tree, VisitorState state) {
return Optional.empty();
}
@Override
protected Optional<Fix> toStringFix(Tree parent, ExpressionTree tree, VisitorState state) {
return Optional.empty();
}
}
|
LiteProtoToString
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/ThrowableInvertedStackTraceRendererFactory.java
|
{
"start": 924,
"end": 1428
}
|
class ____ extends ThrowableStackTraceRendererFactory {
static final ThrowableInvertedStackTraceRendererFactory INSTANCE = new ThrowableInvertedStackTraceRendererFactory();
private ThrowableInvertedStackTraceRendererFactory() {}
@Override
ThrowableInvertedStackTraceRenderer createStackTraceRenderer(ThrowableFormatOptions options) {
return new ThrowableInvertedStackTraceRenderer(options.getIgnorePackages(), options.getLines());
}
}
|
ThrowableInvertedStackTraceRendererFactory
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/route/builder/GatewayFilterSpecTests.java
|
{
"start": 8311,
"end": 8575
}
|
class ____ implements GatewayFilter, Ordered {
@Override
public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
return Mono.empty();
}
@Override
public int getOrder() {
return 1000;
}
}
protected static
|
MyOrderedFilter
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/updatemethods/CompanyMapper.java
|
{
"start": 450,
"end": 929
}
|
interface ____ {
CompanyMapper INSTANCE = Mappers.getMapper( CompanyMapper.class );
void toCompanyEntity(CompanyDto dto, @MappingTarget CompanyEntity entity);
DepartmentInBetween toInBetween(DepartmentDto dto);
@Mappings({
@Mapping( target = "employees", ignore = true ),
@Mapping( target = "secretaryToEmployee", ignore = true )
})
void toDepartmentEntity(DepartmentInBetween dto, @MappingTarget DepartmentEntity entity);
}
|
CompanyMapper
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/transaction/TransactionalTestExecutionListenerTests.java
|
{
"start": 12044,
"end": 12328
}
|
interface ____ {
@AliasFor(annotation = Transactional.class)
String transactionManager() default "";
@AliasFor(annotation = Transactional.class)
Propagation propagation() default REQUIRED;
}
@BeforeTransaction
@Retention(RetentionPolicy.RUNTIME)
private @
|
MetaTxWithOverride
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedInvocationNameFormatterTests.java
|
{
"start": 2848,
"end": 10073
}
|
class ____ {
private final Locale originalLocale = Locale.getDefault();
@AfterEach
void restoreLocale() {
Locale.setDefault(originalLocale);
}
@Test
void formatsDisplayName() {
var formatter = formatter(DISPLAY_NAME_PLACEHOLDER, "enigma");
assertEquals("enigma", format(formatter, 1, arguments()));
assertEquals("enigma", format(formatter, 2, arguments()));
}
@Test
void formatsDisplayNameContainingApostrophe() {
String displayName = "display'Zero";
var formatter = formatter(DISPLAY_NAME_PLACEHOLDER, "display'Zero");
assertEquals(displayName, format(formatter, 1, arguments()));
assertEquals(displayName, format(formatter, 2, arguments()));
}
@Test
void formatsDisplayNameContainingFormatElements() {
String displayName = "{enigma} {0} '{1}'";
var formatter = formatter(DISPLAY_NAME_PLACEHOLDER, displayName);
assertEquals(displayName, format(formatter, 1, arguments()));
assertEquals(displayName, format(formatter, 2, arguments()));
}
@Test
void formatsInvocationIndex() {
var formatter = formatter(INDEX_PLACEHOLDER, "enigma");
assertEquals("1", format(formatter, 1, arguments()));
assertEquals("2", format(formatter, 2, arguments()));
}
@Test
void defaultDisplayName() {
var formatter = formatter(DEFAULT_DISPLAY_NAME, "IGNORED");
var formattedName = format(formatter, 1, arguments("apple", "banana"));
assertThat(formattedName).isEqualTo("[1] \"apple\", \"banana\"");
}
@Test
void formatsIndividualArguments() {
var formatter = formatter("{0} -> {1}", "enigma");
assertEquals("\"foo\" -> 42", format(formatter, 1, arguments("foo", 42)));
}
@Test
void formatsCompleteArgumentsList() {
var formatter = formatter(ARGUMENTS_PLACEHOLDER, "enigma");
// @formatter:off
Arguments args = arguments(
42,
'$',
"enigma",
null,
new int[] { 1, 2, 3 },
new String[] { "foo", "bar" },
new Integer[][] { { 2, 4 }, { 3, 9 } }
);
// @formatter:on
assertEquals("42, '$', \"enigma\", null, [1, 2, 3], [foo, bar], [[2, 4], [3, 9]]", format(formatter, 1, args));
}
@Test
void formatsCompleteArgumentsListWithNames() {
var testMethod = ParameterizedTestCases.getMethod("parameterizedTest", int.class, String.class, Object[].class);
var formatter = formatter(ARGUMENTS_WITH_NAMES_PLACEHOLDER, "enigma", testMethod);
var formattedName = format(formatter, 1, arguments(42, "enigma", new Object[] { "foo", 1 }));
assertEquals("someNumber = 42, someString = \"enigma\", someArray = [foo, 1]", formattedName);
}
@Test
void formatsCompleteArgumentsListWithoutNamesForAggregators() {
var testMethod = ParameterizedTestCases.getMethod("parameterizedTestWithAggregator", int.class, String.class);
var formatter = formatter(ARGUMENTS_WITH_NAMES_PLACEHOLDER, "enigma", testMethod);
var formattedName = format(formatter, 1, arguments(42, "foo", "bar"));
assertEquals("someNumber = 42, \"foo\", \"bar\"", formattedName);
}
@Test
void formatsCompleteArgumentsListWithArrays() {
var formatter = formatter(ARGUMENTS_PLACEHOLDER, "enigma");
// Explicit test for https://github.com/junit-team/junit-framework/issues/814
assertEquals("[foo, bar]", format(formatter, 1, arguments((Object) new String[] { "foo", "bar" })));
assertEquals("[foo, bar], 42, true", format(formatter, 1, arguments(new String[] { "foo", "bar" }, 42, true)));
}
@Test
void formatsEverythingUsingCustomPattern() {
var pattern = DISPLAY_NAME_PLACEHOLDER + " " + INDEX_PLACEHOLDER + " :: " + ARGUMENTS_PLACEHOLDER + " :: {1}";
var formatter = formatter(pattern, "enigma");
assertEquals("enigma 1 :: \"foo\", \"bar\" :: \"bar\"", format(formatter, 1, arguments("foo", "bar")));
assertEquals("enigma 2 :: \"foo\", 42 :: 42", format(formatter, 2, arguments("foo", 42)));
}
@Test
void formatDoesNotAlterArgumentsArray() {
Object[] actual = { 1, "two", Byte.valueOf("-128"), new Integer[][] { { 2, 4 }, { 3, 9 } } };
var formatter = formatter(ARGUMENTS_PLACEHOLDER, "enigma");
var expected = Arrays.copyOf(actual, actual.length);
assertEquals("1, \"two\", -128, [[2, 4], [3, 9]]", format(formatter, 1, arguments(actual)));
assertArrayEquals(expected, actual);
}
@Test
void formatDoesNotRaiseAnArrayStoreException() {
var formatter = formatter("{0} -> {1}", "enigma");
Object[] arguments = new Number[] { 1, 2 };
assertEquals("1 -> 2", format(formatter, 1, arguments(arguments)));
}
@Test
void throwsReadableExceptionForInvalidPattern() {
var exception = assertThrows(JUnitException.class, () -> formatter("{index", "enigma"));
assertNotNull(exception.getCause());
assertEquals(IllegalArgumentException.class, exception.getCause().getClass());
}
@Test
void formattingDoesNotFailIfArgumentToStringImplementationReturnsNull() {
var formatter = formatter(ARGUMENTS_PLACEHOLDER, "enigma");
var formattedName = format(formatter, 1, arguments(new ToStringReturnsNull(), "foo"));
assertThat(formattedName).isEqualTo("null, \"foo\"");
}
@Test
void formattingDoesNotFailIfArgumentToStringImplementationThrowsAnException() {
var formatter = formatter(ARGUMENTS_PLACEHOLDER, "enigma");
var formattedName = format(formatter, 1, arguments(new ToStringThrowsException(), "foo"));
assertThat(formattedName).startsWith(ToStringThrowsException.class.getName() + "@");
assertThat(formattedName).endsWith("\"foo\"");
}
@ParameterizedTest(name = "{0}")
@CsvSource(delimiter = '|', textBlock = """
US | 42.23 is positive on 2019 Jan 13 at 12:34:56
DE | 42,23 is positive on 13.01.2019 at 12:34:56
""")
void customFormattingExpressionsAreSupported(Locale locale, String expectedValue) {
var pattern = "[{index}] {1,number,#.##} is {1,choice,0<positive} on {0,date} at {0,time} even though {2}";
Locale.setDefault(Locale.US);
var date = Date.from(
LocalDate.of(2019, 1, 13).atTime(LocalTime.of(12, 34, 56)).atZone(ZoneId.systemDefault()).toInstant());
Locale.setDefault(locale);
var formatter = formatter(pattern, "enigma");
var formattedName = format(formatter, 1,
arguments(date, new BigDecimal("42.23"), new ToStringThrowsException()));
assertThat(formattedName).startsWith(
"[1] " + expectedValue + " even though " + ToStringThrowsException.class.getName() + "@");
}
@Test
void ignoresExcessPlaceholders() {
var formatter = formatter("{0}, {1}", "enigma");
var formattedName = format(formatter, 1, arguments("foo"));
assertThat(formattedName).isEqualTo("\"foo\", {1}");
}
@Test
void placeholdersCanBeOmitted() {
var formatter = formatter("{0}", "enigma");
var formattedName = format(formatter, 1, arguments("foo", "bar"));
assertThat(formattedName).isEqualTo("\"foo\"");
}
@Test
void placeholdersCanBeSkipped() {
var formatter = formatter("{0}, {2}", "enigma");
var formattedName = format(formatter, 1, arguments("foo", "bar", "baz"));
assertThat(formattedName).isEqualTo("\"foo\", \"baz\"");
}
@Test
void truncatesArgumentsThatExceedMaxLength() {
var formatter = formatter("{arguments}", "display name", 3);
var formattedName = format(formatter, 1, arguments("fo", "foo", "food"));
assertThat(formattedName).isEqualTo("\"fo\", \"foo\", \"fo…\"");
}
@Nested
|
ParameterizedInvocationNameFormatterTests
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/admin/indices/sampling/GetAllSampleConfigurationActionRequestTests.java
|
{
"start": 861,
"end": 2383
}
|
class ____ extends ESTestCase {
protected GetAllSampleConfigurationAction.Request createTestInstance() {
return new GetAllSampleConfigurationAction.Request(randomBoundedTimeValue());
}
public void testGettersAndSetters() {
TimeValue timeout = randomBoundedTimeValue();
GetAllSampleConfigurationAction.Request request = new GetAllSampleConfigurationAction.Request(timeout);
assertThat(request.masterTimeout(), equalTo(timeout));
}
public void testCreateTaskReturnsCancellableTask() {
GetAllSampleConfigurationAction.Request request = createTestInstance();
long taskId = randomLong();
String type = randomAlphaOfLength(10);
String action = GetAllSampleConfigurationAction.NAME;
TaskId parentTaskId = new TaskId(randomAlphaOfLength(10), randomLong());
Map<String, String> headers = Map.of("header1", "value1");
Task task = request.createTask(taskId, type, action, parentTaskId, headers);
assertThat(task, notNullValue());
assertThat(task, instanceOf(org.elasticsearch.tasks.CancellableTask.class));
assertThat(task.getId(), equalTo(taskId));
assertThat(task.getType(), equalTo(type));
assertThat(task.getAction(), equalTo(action));
assertThat(task.getParentTaskId(), equalTo(parentTaskId));
}
private TimeValue randomBoundedTimeValue() {
return TimeValue.timeValueSeconds(randomIntBetween(5, 10));
}
}
|
GetAllSampleConfigurationActionRequestTests
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/ClassesAuditingData.java
|
{
"start": 4711,
"end": 8523
}
|
enum ____ isn't an entity mapping and instead is one
// to a basic type. In this use case, there is nothing special to do.
final ClassAuditingData referencedAuditingData = entityNameToAuditingData.get( referencedEntityName );
addMapEnumeratedKey( property.getValue(), property.getPropertyAccessorName(), referencedAuditingData );
}
}
// HHH-9108
// Added support to handle nested property calculations for components.
// This is useful for AuditMappedBy inside an Embeddable that holds a collection of entities.
if ( propertyAuditingData instanceof ComponentAuditingData ) {
final ComponentAuditingData componentAuditingData = ( ComponentAuditingData) propertyAuditingData;
final Component component = (Component) property.getValue();
for ( String componentPropertyName : componentAuditingData.getNonSyntheticPropertyNames() ) {
final Property componentProperty = component.getProperty( componentPropertyName );
updateCalculatedProperty( entityName, componentProperty, componentPropertyName, componentAuditingData );
}
}
}
private void setAuditMappedByInsertable(
String referencedEntityName,
String entityName,
ClassAuditingData referencedAuditData,
PropertyAuditingData propertyAuditingData) {
forcePropertyInsertable(
referencedAuditData,
propertyAuditingData.getAuditMappedBy(),
entityName,
referencedEntityName
);
forcePropertyInsertable(
referencedAuditData,
propertyAuditingData.getPositionMappedBy(),
entityName,
referencedEntityName
);
}
private void addSyntheticIndexProperty(List value, String propertyAccessorName, ClassAuditingData classAuditingData) {
final Value indexValue = value.getIndex();
if ( indexValue != null && indexValue.getSelectables().size() > 0 ) {
final String indexColumnName = indexValue.getSelectables().get( 0 ).getText();
if ( indexColumnName != null ) {
final PropertyAuditingData auditingData = new PropertyAuditingData(
indexColumnName,
propertyAccessorName,
RelationTargetNotFoundAction.ERROR,
false,
true,
indexValue
);
classAuditingData.addPropertyAuditingData( indexColumnName, auditingData );
}
}
}
private void addMapEnumeratedKey(Value value, String propertyAccessorName, ClassAuditingData classAuditingData) {
if ( value instanceof org.hibernate.mapping.Map ) {
final Value indexValue = ( (org.hibernate.mapping.Map) value ).getIndex();
if ( indexValue != null && indexValue.getSelectables().size() > 0 ) {
final String indexColumnName = indexValue.getSelectables().get( 0 ).getText();
if ( !StringTools.isEmpty( indexColumnName ) ) {
final PropertyAuditingData propertyAuditingData = new PropertyAuditingData(
indexColumnName,
propertyAccessorName,
RelationTargetNotFoundAction.ERROR,
true,
true,
indexValue
);
classAuditingData.addPropertyAuditingData( indexColumnName, propertyAuditingData );
}
}
}
}
private void forcePropertyInsertable(
ClassAuditingData classAuditingData,
String propertyName,
String entityName,
String referencedEntityName) {
if ( propertyName != null ) {
if ( classAuditingData.getPropertyAuditingData( propertyName ) == null ) {
throw new EnversMappingException(
String.format(
Locale.ENGLISH,
"@AuditMappedBy points to a property that doesn't exist: %s.%s",
referencedEntityName,
propertyName
)
);
}
LOG.debugf(
"Non-insertable property %s.%s will be made insertable because a matching @AuditMappedBy was found in the %s entity",
referencedEntityName,
propertyName,
entityName
);
classAuditingData.getPropertyAuditingData( propertyName ).setForceInsertable( true );
}
}
}
|
type
|
java
|
alibaba__nacos
|
test/core-test/src/test/java/com/alibaba/nacos/test/ability/AbilityDiscovery.java
|
{
"start": 11884,
"end": 12689
}
|
class ____ extends RequestHandler<ConfigQueryRequest, ConfigQueryResponse> {
public ClusterClientRequestHandler(RequestFilters requestFilters) throws NoSuchFieldException, IllegalAccessException {
Field declaredField = RequestHandler.class.getDeclaredField("requestFilters");
declaredField.setAccessible(true);
declaredField.set(this, requestFilters);
}
@Override
public ConfigQueryResponse handle(ConfigQueryRequest request, RequestMeta meta) throws NacosException {
if (meta.getConnectionAbility(AbilityKey.CLUSTER_CLIENT_TEST_1).equals(AbilityStatus.SUPPORTED)) {
clusterSuccess = true;
}
return new ConfigQueryResponse();
}
}
}
|
ClusterClientRequestHandler
|
java
|
apache__camel
|
core/camel-core-engine/src/main/java/org/apache/camel/impl/DefaultModelReifierFactory.java
|
{
"start": 2001,
"end": 4068
}
|
class ____ implements ModelReifierFactory {
@Override
public Route createRoute(CamelContext camelContext, Object routeDefinition) {
return new RouteReifier(camelContext, (ProcessorDefinition<?>) routeDefinition).createRoute();
}
@Override
public DataFormat createDataFormat(CamelContext camelContext, Object dataFormatDefinition) {
return DataFormatReifier.reifier(camelContext, (DataFormatDefinition) dataFormatDefinition).createDataFormat();
}
@Override
public Processor createErrorHandler(Route route, Processor processor) throws Exception {
return createErrorHandler(route, route.getErrorHandlerFactory(), processor);
}
@Override
public Processor createErrorHandler(Route route, ErrorHandlerFactory errorHandlerFactory, Processor processor)
throws Exception {
return ErrorHandlerReifier.reifier(route, errorHandlerFactory).createErrorHandler(processor);
}
@Override
public ErrorHandlerFactory createDefaultErrorHandler() {
return new DefaultErrorHandlerDefinition();
}
@Override
public Expression createExpression(CamelContext camelContext, Object expressionDefinition) {
return ExpressionReifier.reifier(camelContext, (ExpressionDefinition) expressionDefinition).createExpression();
}
@Override
public Predicate createPredicate(CamelContext camelContext, Object expressionDefinition) {
return ExpressionReifier.reifier(camelContext, (ExpressionDefinition) expressionDefinition).createPredicate();
}
@Override
public Transformer createTransformer(CamelContext camelContext, Object transformerDefinition) {
return TransformerReifier.reifier(camelContext, (TransformerDefinition) transformerDefinition).createTransformer();
}
@Override
public Validator createValidator(CamelContext camelContext, Object transformerDefinition) {
return ValidatorReifier.reifier(camelContext, (ValidatorDefinition) transformerDefinition).createValidator();
}
}
|
DefaultModelReifierFactory
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/verification/VerificationEvent.java
|
{
"start": 302,
"end": 901
}
|
interface ____ {
/**
* @return The mock that a verification happened on.
*/
Object getMock();
/**
* @return the {@link VerificationMode} that was used.
*/
VerificationMode getMode();
/**
* @return the {@link VerificationData} that was verified on.
*/
VerificationData getData();
/**
* A nullable Throwable if it is null, the verification succeeded,
* otherwise the throwable contains the cause of why the verification failed.
*
* @return null or the error.
*/
Throwable getVerificationError();
}
|
VerificationEvent
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/aop/introduction/generics/PublisherIntroduction.java
|
{
"start": 882,
"end": 2304
}
|
class ____ implements MethodInterceptor<GenericPublisher<?>, Object> {
@Nullable
@Override
public Object intercept(final MethodInvocationContext<GenericPublisher<?>, Object> context) {
final Method method = context.getTargetMethod();
if (isEqualsMethod(method)) {
// Only consider equal when proxies are identical.
return context.getTarget() == context.getParameterValues()[0];
} else if (isHashCodeMethod(method)) {
return hashCode();
} else if (isToStringMethod(method)) {
return toString();
} else {
return context.getParameterValues()[0].getClass().getSimpleName();
}
}
private static boolean isEqualsMethod(final Method method) {
if ((method == null) || !"equals".equals(method.getName())) {
return false;
}
final Class<?>[] paramTypes = method.getParameterTypes();
return (paramTypes.length == 1) && (paramTypes[0] == Object.class);
}
private static boolean isHashCodeMethod(final Method method) {
return (method != null) && "hashCode".equals(method.getName()) && (method.getParameterTypes().length == 0);
}
private static boolean isToStringMethod(final Method method) {
return (method != null) && "toString".equals(method.getName()) && (method.getParameterTypes().length == 0);
}
}
|
PublisherIntroduction
|
java
|
quarkusio__quarkus
|
extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/FunqyServerResponse.java
|
{
"start": 74,
"end": 167
}
|
interface ____ {
Uni<?> getOutput();
void setOutput(Uni<?> out);
}
|
FunqyServerResponse
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncEndpointTest.java
|
{
"start": 1170,
"end": 2714
}
|
class ____ extends ContextTestSupport {
private static String beforeThreadName;
private static String afterThreadName;
@Test
public void testAsyncEndpoint() throws Exception {
getMockEndpoint("mock:before").expectedBodiesReceived("Hello Camel");
getMockEndpoint("mock:after").expectedBodiesReceived("Bye Camel");
getMockEndpoint("mock:result").expectedBodiesReceived("Bye Camel");
String reply = template.requestBody("direct:start", "Hello Camel", String.class);
assertEquals("Bye Camel", reply);
assertMockEndpointsSatisfied();
assertFalse(beforeThreadName.equalsIgnoreCase(afterThreadName), "Should use different threads");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.addComponent("async", new MyAsyncComponent());
from("direct:start").to("mock:before").to("log:before").process(new Processor() {
public void process(Exchange exchange) {
beforeThreadName = Thread.currentThread().getName();
}
}).to("async:bye:camel").process(new Processor() {
public void process(Exchange exchange) {
afterThreadName = Thread.currentThread().getName();
}
}).to("log:after").to("mock:after").to("mock:result");
}
};
}
}
|
AsyncEndpointTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
|
{
"start": 1057,
"end": 3872
}
|
class ____ extends AbstractConvertFunction.AbstractEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToIntegerFromBooleanEvaluator.class);
private final EvalOperator.ExpressionEvaluator bool;
public ToIntegerFromBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator bool,
DriverContext driverContext) {
super(driverContext, source);
this.bool = bool;
}
@Override
public EvalOperator.ExpressionEvaluator next() {
return bool;
}
@Override
public Block evalVector(Vector v) {
BooleanVector vector = (BooleanVector) v;
int positionCount = v.getPositionCount();
if (vector.isConstant()) {
return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount);
}
try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
builder.appendInt(evalValue(vector, p));
}
return builder.build();
}
}
private int evalValue(BooleanVector container, int index) {
boolean value = container.getBoolean(index);
return ToInteger.fromBoolean(value);
}
@Override
public Block evalBlock(Block b) {
BooleanBlock block = (BooleanBlock) b;
int positionCount = block.getPositionCount();
try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
int valueCount = block.getValueCount(p);
int start = block.getFirstValueIndex(p);
int end = start + valueCount;
boolean positionOpened = false;
boolean valuesAppended = false;
for (int i = start; i < end; i++) {
int value = evalValue(block, i);
if (positionOpened == false && valueCount > 1) {
builder.beginPositionEntry();
positionOpened = true;
}
builder.appendInt(value);
valuesAppended = true;
}
if (valuesAppended == false) {
builder.appendNull();
} else if (positionOpened) {
builder.endPositionEntry();
}
}
return builder.build();
}
}
private int evalValue(BooleanBlock container, int index) {
boolean value = container.getBoolean(index);
return ToInteger.fromBoolean(value);
}
@Override
public String toString() {
return "ToIntegerFromBooleanEvaluator[" + "bool=" + bool + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(bool);
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += bool.baseRamBytesUsed();
return baseRamBytesUsed;
}
public static
|
ToIntegerFromBooleanEvaluator
|
java
|
grpc__grpc-java
|
xds/src/test/java/io/grpc/xds/ClusterImplLoadBalancerTest.java
|
{
"start": 67546,
"end": 68530
}
|
class ____ implements TlsContextManager {
@Override
public SslContextProvider findOrCreateClientSslContextProvider(
UpstreamTlsContext upstreamTlsContext) {
SslContextProvider sslContextProvider = mock(SslContextProvider.class);
when(sslContextProvider.getUpstreamTlsContext()).thenReturn(upstreamTlsContext);
return sslContextProvider;
}
@Override
public SslContextProvider releaseClientSslContextProvider(
SslContextProvider sslContextProvider) {
// no-op
return null;
}
@Override
public SslContextProvider findOrCreateServerSslContextProvider(
DownstreamTlsContext downstreamTlsContext) {
throw new UnsupportedOperationException("should not be called");
}
@Override
public SslContextProvider releaseServerSslContextProvider(
SslContextProvider sslContextProvider) {
throw new UnsupportedOperationException("should not be called");
}
}
}
|
FakeTlsContextManager
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java
|
{
"start": 77050,
"end": 77993
}
|
class ____ extends OptimizerRule<OrderBy> {
@Override
protected LogicalPlan rule(OrderBy ob) {
List<Order> prunedOrders = new ArrayList<>();
for (Order o : ob.order()) {
if (o.child().foldable()) {
prunedOrders.add(o);
}
}
// everything was eliminated, the order isn't needed anymore
if (prunedOrders.size() == ob.order().size()) {
return ob.child();
}
if (prunedOrders.size() > 0) {
List<Order> newOrders = new ArrayList<>(ob.order());
newOrders.removeAll(prunedOrders);
return new OrderBy(ob.source(), ob.child(), newOrders);
}
return ob;
}
}
// NB: it is important to start replacing casts from the bottom to properly replace aliases
public abstract static
|
PruneLiteralsInOrderBy
|
java
|
quarkusio__quarkus
|
extensions/kafka-client/runtime/src/test/java/io/quarkus/kafka/client/serialization/JsonbDeserializerTest.java
|
{
"start": 350,
"end": 1762
}
|
class ____ {
@Test
void shouldDeserializeEntity() {
MyEntity expected = new MyEntity(1, "entity1");
JsonbDeserializer<MyEntity> deserializer = new JsonbDeserializer<>(MyEntity.class);
MyEntity actual = deserializer.deserialize("topic", "{\"id\":1,\"name\":\"entity1\"}".getBytes());
assertNotNull(actual);
assertEquals(expected, actual);
}
@Test
void shouldDeserializeListOfEntities() {
Type listType = new ArrayList<MyEntity>() {
}.getClass().getGenericSuperclass();
JsonbDeserializer<List<MyEntity>> deserializer = new JsonbDeserializer<>(listType);
List<MyEntity> actuals = deserializer.deserialize("topic",
"[{\"id\":1,\"name\":\"entity1\"},{\"id\":2,\"name\":\"entity2\"}]".getBytes());
assertNotNull(actuals);
assertEquals(2, actuals.size());
}
@Test
void shouldDeserializeNullAsNullString() {
JsonbDeserializer<MyEntity> deserializer = new JsonbDeserializer<>(MyEntity.class);
MyEntity results = deserializer.deserialize("topic", "null".getBytes());
assertNull(results);
}
@Test
void shouldDeserializeNullAsNull() {
JsonbDeserializer<MyEntity> deserializer = new JsonbDeserializer<>(MyEntity.class);
MyEntity results = deserializer.deserialize("topic", null);
assertNull(results);
}
}
|
JsonbDeserializerTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedRWVectorsFormat.java
|
{
"start": 1344,
"end": 2133
}
|
class ____ extends ES816BinaryQuantizedVectorsFormat {
private static final FlatVectorsFormat rawVectorFormat = new Lucene99FlatVectorsFormat(
FlatVectorScorerUtil.getLucene99FlatVectorsScorer()
);
private static final ES816BinaryFlatRWVectorsScorer scorer = new ES816BinaryFlatRWVectorsScorer(
FlatVectorScorerUtil.getLucene99FlatVectorsScorer()
);
/** Creates a new instance with the default number of vectors per cluster. */
public ES816BinaryQuantizedRWVectorsFormat() {
super();
}
@Override
public FlatVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException {
return new ES816BinaryQuantizedVectorsWriter(scorer, rawVectorFormat.fieldsWriter(state), state);
}
}
|
ES816BinaryQuantizedRWVectorsFormat
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy-jaxb/deployment/src/test/java/io/quarkus/resteasy/jaxb/deployment/ConsumesXMLTestCase.java
|
{
"start": 447,
"end": 1112
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Bar.class, FooResource.class));
@Test
public void testConsumesXML() {
RestAssured.given()
.contentType(ContentType.XML)
.body(new Bar("open", "bar"))
.when().post("/foo")
.then()
.log().ifValidationFails()
.statusCode(200)
.contentType(MediaType.TEXT_PLAIN)
.body(Matchers.is("open bar"));
}
@Path("/foo")
public static
|
ConsumesXMLTestCase
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/InvalidJavaTimeConstantTest.java
|
{
"start": 934,
"end": 1380
}
|
class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(InvalidJavaTimeConstant.class, getClass());
@Test
public void cornerCases() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import java.time.LocalDateTime;
import java.time.LocalTime;
public
|
InvalidJavaTimeConstantTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java
|
{
"start": 1160,
"end": 4925
}
|
class ____ extends AbstractConvertFunction.AbstractEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToStringFromGeoPointEvaluator.class);
private final EvalOperator.ExpressionEvaluator wkb;
public ToStringFromGeoPointEvaluator(Source source, EvalOperator.ExpressionEvaluator wkb,
DriverContext driverContext) {
super(driverContext, source);
this.wkb = wkb;
}
@Override
public EvalOperator.ExpressionEvaluator next() {
return wkb;
}
@Override
public Block evalVector(Vector v) {
BytesRefVector vector = (BytesRefVector) v;
OrdinalBytesRefVector ordinals = vector.asOrdinals();
if (ordinals != null) {
return evalOrdinals(ordinals);
}
int positionCount = v.getPositionCount();
BytesRef scratchPad = new BytesRef();
if (vector.isConstant()) {
return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount);
}
try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
builder.appendBytesRef(evalValue(vector, p, scratchPad));
}
return builder.build();
}
}
private BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) {
BytesRef value = container.getBytesRef(index, scratchPad);
return ToString.fromGeoPoint(value);
}
@Override
public Block evalBlock(Block b) {
BytesRefBlock block = (BytesRefBlock) b;
int positionCount = block.getPositionCount();
try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef scratchPad = new BytesRef();
for (int p = 0; p < positionCount; p++) {
int valueCount = block.getValueCount(p);
int start = block.getFirstValueIndex(p);
int end = start + valueCount;
boolean positionOpened = false;
boolean valuesAppended = false;
for (int i = start; i < end; i++) {
BytesRef value = evalValue(block, i, scratchPad);
if (positionOpened == false && valueCount > 1) {
builder.beginPositionEntry();
positionOpened = true;
}
builder.appendBytesRef(value);
valuesAppended = true;
}
if (valuesAppended == false) {
builder.appendNull();
} else if (positionOpened) {
builder.endPositionEntry();
}
}
return builder.build();
}
}
private BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) {
BytesRef value = container.getBytesRef(index, scratchPad);
return ToString.fromGeoPoint(value);
}
private Block evalOrdinals(OrdinalBytesRefVector v) {
int positionCount = v.getDictionaryVector().getPositionCount();
BytesRef scratchPad = new BytesRef();
try (BytesRefVector.Builder builder = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
builder.appendBytesRef(evalValue(v.getDictionaryVector(), p, scratchPad));
}
IntVector ordinals = v.getOrdinalsVector();
ordinals.incRef();
return new OrdinalBytesRefVector(ordinals, builder.build()).asBlock();
}
}
@Override
public String toString() {
return "ToStringFromGeoPointEvaluator[" + "wkb=" + wkb + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(wkb);
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += wkb.baseRamBytesUsed();
return baseRamBytesUsed;
}
public static
|
ToStringFromGeoPointEvaluator
|
java
|
google__guava
|
guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java
|
{
"start": 7160,
"end": 7320
}
|
class ____ {
@Keep
public static void christenPoodle(String unused) {
throw new UnsupportedOperationException();
}
}
private static
|
ThrowsUoe
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryBoxedVariableTest.java
|
{
"start": 17167,
"end": 17405
}
|
class ____ {
private static final Integer FOO = 42;
public ImmutableList<Integer> foos() {
return ImmutableList.of(FOO);
}
}
""")
.doTest();
}
}
|
Test
|
java
|
quarkusio__quarkus
|
extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/RestClientMetricsFilter.java
|
{
"start": 895,
"end": 4191
}
|
class ____ implements ClientRequestFilter, ClientResponseFilter {
private final static String REQUEST_METRIC_PROPERTY = "restClientMetrics";
private final MeterRegistry registry = Metrics.globalRegistry;
private final HttpBinderConfiguration httpMetricsConfig;
private final Meter.MeterProvider<Timer> timer;
// RESTEasy requires no-arg constructor for CDI injection: https://issues.redhat.com/browse/RESTEASY-1538
// In the classic Rest Client this is the constructor called whereas in the Reactive one,
// the constructor using HttpBinderConfiguration is called.
public RestClientMetricsFilter() {
this(Arc.container().instance(HttpBinderConfiguration.class).get());
}
@Inject
public RestClientMetricsFilter(final HttpBinderConfiguration httpMetricsConfig) {
this.httpMetricsConfig = httpMetricsConfig;
timer = Timer.builder(httpMetricsConfig.getHttpClientRequestsName())
.withRegistry(registry);
}
@Override
public void filter(final ClientRequestContext requestContext) {
if (!httpMetricsConfig.isClientEnabled()) {
return;
}
RequestMetricInfo requestMetric = new RestClientMetricInfo(requestContext);
requestMetric.setSample(Timer.start(registry));
requestContext.setProperty(REQUEST_METRIC_PROPERTY, requestMetric);
}
@Override
public void filter(final ClientRequestContext requestContext, final ClientResponseContext responseContext) {
if (!httpMetricsConfig.isClientEnabled()) {
return;
}
RequestMetricInfo requestMetric = getRequestMetric(requestContext);
if (requestMetric != null) {
String templatePath = (String) requestContext.getProperty("UrlPathTemplate");
String requestPath = requestMetric.getNormalizedUriPath(
httpMetricsConfig.getClientMatchPatterns(),
httpMetricsConfig.getClientIgnorePatterns(),
templatePath == null ? requestContext.getUri().getPath() : templatePath);
if (requestPath != null) {
Timer.Sample sample = requestMetric.getSample();
int statusCode = responseContext.getStatus();
sample.stop(timer
.withTags(Tags.of(
HttpCommonTags.method(requestContext.getMethod()),
HttpCommonTags.uri(requestPath, requestContext.getUri().getPath(), statusCode,
httpMetricsConfig.isClientSuppress4xxErrors()),
HttpCommonTags.outcome(statusCode),
HttpCommonTags.status(statusCode),
clientName(requestContext))));
}
}
}
private RequestMetricInfo getRequestMetric(ClientRequestContext requestContext) {
return (RequestMetricInfo) requestContext.getProperty(REQUEST_METRIC_PROPERTY);
}
private Tag clientName(ClientRequestContext requestContext) {
String host = requestContext.getUri().getHost();
if (host == null) {
host = "none";
}
return Tag.of("clientName", host);
}
static
|
RestClientMetricsFilter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/boot/BootLoggingTests.java
|
{
"start": 1432,
"end": 2920
}
|
class ____ {
@Test
@ServiceRegistry( settings = @Setting( name = AvailableSettings.ALLOW_METADATA_ON_BOOT, value = "false" ) )
@DomainModel( standardModels = StandardDomainModel.HELPDESK )
@SessionFactory( exportSchema = false, generateStatistics = true )
@RequiresDialect( H2Dialect.class )
void testNoJdbcAccess(MessageKeyWatcher dbInfoLoggingWatcher, ServiceRegistryScope registryScope, SessionFactoryScope sessionFactoryScope) {
// make sure we get the db-info logged
assertThat( dbInfoLoggingWatcher.wasTriggered() ).isTrue();
// make sure it was logged as we expect
final ConfigurationService configurationService = registryScope.getRegistry().requireService( ConfigurationService.class );
assertThat( dbInfoLoggingWatcher.getTriggeredMessages() ).hasSize( 1 );
final String loggedMessage = dbInfoLoggingWatcher.getTriggeredMessages().get( 0 );
assertThat( loggedMessage ).contains( "Database JDBC URL [" + configurationService.getSettings().get( JdbcSettings.URL ) );
assertThat( loggedMessage ).contains( "Database driver: " + configurationService.getSettings().get( JdbcSettings.DRIVER ) );
assertThat( loggedMessage ).contains( "Maximum pool size: " + configurationService.getSettings().get( JdbcSettings.POOL_SIZE ) );
// and make sure we did not connect to the database
final StatisticsImplementor statistics = sessionFactoryScope.getSessionFactory().getStatistics();
assertThat( statistics.getConnectCount() ).isEqualTo( 0 );
}
}
|
BootLoggingTests
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/geo/ReactiveGeoCommands.java
|
{
"start": 611,
"end": 14373
}
|
interface ____<K, V> extends ReactiveRedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one geospatial item in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param member the member to add.
* @return {@code true} if the geospatial item was added, {@code false} otherwise
**/
Uni<Boolean> geoadd(K key, double longitude, double latitude, V member);
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one geospatial item in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param position the geo position
* @param member the member to add.
* @return {@code true} if the geospatial item was added, {@code false} otherwise
**/
Uni<Boolean> geoadd(K key, GeoPosition position, V member);
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one geospatial item in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param item the item to add
* @return {@code true} if the geospatial item was added, {@code false} otherwise
**/
Uni<Boolean> geoadd(K key, GeoItem<V> item);
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one or more geospatial items in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param items the geo-item triplets containing the longitude, latitude and name / value
* @return the number of elements added to the sorted set (excluding score updates).
**/
Uni<Integer> geoadd(K key, GeoItem<V>... items);
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one geospatial item in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param longitude the longitude coordinate according to WGS84.
* @param latitude the latitude coordinate according to WGS84.
* @param member the member to add.
* @param args additional arguments.
* @return {@code true} if the geospatial item was added, {@code false} otherwise
**/
Uni<Boolean> geoadd(K key, double longitude, double latitude, V member, GeoAddArgs args);
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one geospatial item in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param item the item to add
* @param args additional arguments.
* @return {@code true} if the geospatial item was added, {@code false} otherwise
**/
Uni<Boolean> geoadd(K key, GeoItem<V> item, GeoAddArgs args);
/**
* Execute the command <a href="https://redis.io/commands/geoadd">GEOADD</a>.
* Summary: Add one or more geospatial items in the geospatial index represented using a sorted set
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param args additional arguments.
* @param items the items containing the longitude, latitude and name / value
* @return the number of elements added to the sorted set (excluding score updates). If the {@code CH} option is
* specified, the number of elements that were changed (added or updated).
**/
Uni<Integer> geoadd(K key, GeoAddArgs args, GeoItem<V>... items);
/**
* Execute the command <a href="https://redis.io/commands/geodist">GEODIST</a>.
* Summary: Returns the distance between two members of a geospatial index
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param from from member
* @param to to member
* @param unit the unit
* @return The command returns the distance as a double in the specified unit, or {@code empty} if one or both the
* elements are missing.
**/
Uni<Double> geodist(K key, V from, V to, GeoUnit unit);
/**
* Execute the command <a href="https://redis.io/commands/geohash">GEOHASH</a>.
* Summary: Returns members of a geospatial index as standard geohash strings
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param members the members
* @return The command returns an array where each element is the Geohash corresponding to each member name passed
* as argument to the command.
**/
Uni<List<String>> geohash(K key, V... members);
/**
* Execute the command <a href="https://redis.io/commands/geopos">GEOPOS</a>.
* Summary: Returns longitude and latitude of members of a geospatial index
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param members the items
* @return The command returns an array where each element is a{@link GeoPosition} representing longitude and
* latitude (x,y) of each member name passed as argument to the command. Non-existing elements are reported as
* {@code null} elements.
**/
Uni<List<GeoPosition>> geopos(K key, V... members);
/**
* Execute the command <a href="https://redis.io/commands/georadius">GEORADIUS</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* poUni<Integer>
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param longitude the longitude
* @param latitude the latitude
* @param radius the radius
* @param unit the unit
* @return the list of values.
* @deprecated See https://redis.io/commands/georadius
**/
@Deprecated
Uni<Set<V>> georadius(K key, double longitude, double latitude, double radius, GeoUnit unit);
/**
* Execute the command <a href="https://redis.io/commands/georadius">GEORADIUS</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* poUni<Integer>
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param position the position
* @param radius the radius
* @param unit the unit
* @return the list of values.
* @deprecated See https://redis.io/commands/georadius
**/
@Deprecated
Uni<Set<V>> georadius(K key, GeoPosition position, double radius, GeoUnit unit);
/**
* Execute the command <a href="https://redis.io/commands/georadius">GEORADIUS</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* poUni<Integer>
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param longitude the longitude
* @param latitude the latitude
* @param radius the radius
* @param unit the unit
* @param geoArgs the extra arguments of the {@code GEORADIUS} command
* @return the list of {@link GeoValue}. Only the field requested using {@code geoArgs} are populated in the returned
* {@link GeoValue}.
* @deprecated See https://redis.io/commands/georadius
**/
@Deprecated
Uni<List<GeoValue<V>>> georadius(K key, double longitude, double latitude, double radius, GeoUnit unit,
GeoRadiusArgs geoArgs);
/**
* Execute the command <a href="https://redis.io/commands/georadius">GEORADIUS</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* poUni<Integer>
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param position the position
* @param radius the radius
* @param unit the unit
* @param geoArgs the extra arguments of the {@code GEORADIUS} command
* @return the list of {@link GeoValue}. Only the field requested using {@code geoArgs} are populated in the returned
* {@link GeoValue}.
* @deprecated See https://redis.io/commands/georadius
**/
@Deprecated
Uni<List<GeoValue<V>>> georadius(K key, GeoPosition position, double radius, GeoUnit unit, GeoRadiusArgs geoArgs);
/**
* Execute the command <a href="https://redis.io/commands/georadius">GEORADIUS</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* poUni<Integer>.
* It also stores the results in a sorted set.
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param longitude the longitude
* @param latitude the latitude
* @param radius the radius
* @param unit the unit
* @param geoArgs the extra {@code STORE} arguments of the {@code GEORADIUS} command
* @return The number of items contained in the result written at the configured key.
* @deprecated See https://redis.io/commands/georadius
**/
@Deprecated
Uni<Long> georadius(K key, double longitude, double latitude, double radius, GeoUnit unit, GeoRadiusStoreArgs<K> geoArgs);
/**
* Execute the command <a href="https://redis.io/commands/georadius">GEORADIUS</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* poUni<Integer>.
* It also stores the results in a sorted set.
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param position the position
* @param radius the radius
* @param unit the unit
* @param geoArgs the extra {@code STORE} arguments of the {@code GEORADIUS} command
* @return The number of items contained in the result written at the configured key.
* @deprecated See https://redis.io/commands/georadius
**/
@Deprecated
Uni<Long> georadius(K key, GeoPosition position, double radius, GeoUnit unit, GeoRadiusStoreArgs<K> geoArgs);
/**
* Execute the command <a href="https://redis.io/commands/georadiusbymember">GEORADIUSBYMEMBER</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* member
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param member the member
* @param distance the max distance
* @return the set of values
* @deprecated See https://redis.io/commands/georadiusbymember
**/
@Deprecated
Uni<Set<V>> georadiusbymember(K key, V member, double distance, GeoUnit unit);
/**
* Execute the command <a href="https://redis.io/commands/georadiusbymember">GEORADIUSBYMEMBER</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* member
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param member the member
* @param distance the max distance
* @param geoArgs the extra arguments of the {@code GEORADIUS} command
* @return the list of {@link GeoValue}. Only the field requested using {@code geoArgs} are populated in the
* returned {@link GeoValue values}.
* @deprecated See https://redis.io/commands/georadiusbymember
**/
@Deprecated
Uni<List<GeoValue<V>>> georadiusbymember(K key, V member, double distance, GeoUnit unit, GeoRadiusArgs geoArgs);
/**
* Execute the command <a href="https://redis.io/commands/georadiusbymember">GEORADIUSBYMEMBER</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members matching a given maximum distance from a
* member.
* It also stores the results in a sorted set.
* Group: geo
* Requires Redis 3.2.0
*
* @param key the key
* @param member the member
* @param distance the max distance
* @param geoArgs the extra arguments of the {@code GEORADIUS} command
* @return The number of items contained in the result written at the configured key.
* @deprecated See https://redis.io/commands/georadiusbymember
**/
@Deprecated
Uni<Long> georadiusbymember(K key, V member, double distance, GeoUnit unit, GeoRadiusStoreArgs<K> geoArgs);
/**
* Execute the command <a href="https://redis.io/commands/geosearch">GEOSEARCH</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members inside an area of a box or a circle.
* Group: geo
* Requires Redis 6.2.0
*
* @return the list of {@code GeoValue<V>>}. The populated data depends on the parameters configured in {@code args}.
**/
Uni<List<GeoValue<V>>> geosearch(K key, GeoSearchArgs<V> args);
/**
* Execute the command <a href="https://redis.io/commands/geosearchstore">GEOSEARCHSTORE</a>.
* Summary: Query a sorted set representing a geospatial index to fetch members inside an area of a box or a circle,
* and store the result in another key.
* Group: geo
* Requires Redis 6.2.0
*
* @return the number of elements in the resulting set.
**/
Uni<Long> geosearchstore(K destination, K key, GeoSearchStoreArgs<V> args, boolean storeDist);
}
|
ReactiveGeoCommands
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/FilePosition.java
|
{
"start": 1334,
"end": 5489
}
|
class ____ implements Serializable {
@Serial
private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(FilePosition.class);
/**
* Create a new {@code FilePosition} using the supplied {@code line} number
* and an undefined column number.
*
* @param line the line number; must be greater than zero
* @return a {@link FilePosition} with the given line number
*/
public static FilePosition from(int line) {
return new FilePosition(line);
}
/**
* Create a new {@code FilePosition} using the supplied {@code line} and
* {@code column} numbers.
*
* @param line the line number; must be greater than zero
* @param column the column number; must be greater than zero
* @return a {@link FilePosition} with the given line and column numbers
*/
public static FilePosition from(int line, int column) {
return new FilePosition(line, column);
}
/**
* Create an optional {@code FilePosition} by parsing the supplied
* {@code query} string.
*
* <p>Examples of valid {@code query} strings:
* <ul>
* <li>{@code "line=23"}</li>
* <li>{@code "line=23&column=42"}</li>
* </ul>
*
* @param query the query string; may be {@code null}
* @return an {@link Optional} containing a {@link FilePosition} with
* the parsed line and column numbers; never {@code null} but potentially
* empty
* @since 1.3
* @see #from(int)
* @see #from(int, int)
*/
public static Optional<FilePosition> fromQuery(String query) {
FilePosition result = null;
Integer line = null;
Integer column = null;
if (StringUtils.isNotBlank(query)) {
try {
for (String pair : query.split("&")) {
String[] data = pair.split("=");
if (data.length == 2) {
String key = data[0];
if (line == null && "line".equals(key)) {
line = Integer.valueOf(data[1]);
}
else if (column == null && "column".equals(key)) {
column = Integer.valueOf(data[1]);
}
}
// Already found what we're looking for?
if (line != null && column != null) {
break;
}
}
}
catch (IllegalArgumentException ex) {
logger.debug(ex, () -> "Failed to parse 'line' and/or 'column' from query string: " + query);
// fall-through and continue
}
if (line != null) {
result = column == null ? new FilePosition(line) : new FilePosition(line, column);
}
}
return Optional.ofNullable(result);
}
private final int line;
private final @Nullable Integer column;
private FilePosition(int line) {
Preconditions.condition(line > 0, "line number must be greater than zero");
this.line = line;
this.column = null;
}
private FilePosition(int line, int column) {
Preconditions.condition(line > 0, "line number must be greater than zero");
Preconditions.condition(column > 0, "column number must be greater than zero");
this.line = line;
this.column = column;
}
/**
* Get the line number of this {@code FilePosition}.
*
* @return the line number
*/
public int getLine() {
return this.line;
}
/**
* Get the column number of this {@code FilePosition}, if available.
*
* @return an {@code Optional} containing the column number; never
* {@code null} but potentially empty
*/
public Optional<Integer> getColumn() {
return Optional.ofNullable(this.column);
}
String toQueryPart() {
StringBuilder builder = new StringBuilder("line=").append(this.line);
if (this.column != null) {
builder.append("&column=").append(this.column);
}
return builder.toString();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FilePosition that = (FilePosition) o;
return (this.line == that.line) && Objects.equals(this.column, that.column);
}
@Override
public int hashCode() {
return Objects.hash(this.line, this.column);
}
@Override
public String toString() {
// @formatter:off
return new ToStringBuilder(this)
.append("line", this.line)
.append("column", getColumn().orElse(-1))
.toString();
// @formatter:on
}
}
|
FilePosition
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/test/java/org/apache/logging/log4j/util/LoaderUtilSecurityManagerTest.java
|
{
"start": 1393,
"end": 1916
}
|
class ____ extends SecurityManager {
@Override
public void checkPermission(final Permission perm) {
if (perm.equals(LoaderUtil.GET_CLASS_LOADER)) {
throw new SecurityException("disabled");
}
}
}
@Test
public void canGetClassLoaderThroughPrivileges() {
assertFalse(LoaderUtil.GET_CLASS_LOADER_DISABLED);
assertDoesNotThrow(() -> LoaderUtil.getClassLoader(LoaderUtilSecurityManagerTest.class, String.class));
}
}
|
TestSecurityManager
|
java
|
spring-projects__spring-boot
|
module/spring-boot-quartz/src/main/java/org/springframework/boot/quartz/actuate/endpoint/QuartzEndpoint.java
|
{
"start": 13735,
"end": 14271
}
|
class ____ implements OperationResponseBody {
private final String group;
private final Map<String, QuartzJobSummaryDescriptor> jobs;
QuartzJobGroupSummaryDescriptor(String group, Map<String, QuartzJobSummaryDescriptor> jobs) {
this.group = group;
this.jobs = jobs;
}
public String getGroup() {
return this.group;
}
public Map<String, QuartzJobSummaryDescriptor> getJobs() {
return this.jobs;
}
}
/**
* Description of a {@link Job Quartz Job}.
*/
public static final
|
QuartzJobGroupSummaryDescriptor
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/CapacitySchedulerPlanFollower.java
|
{
"start": 2690,
"end": 6037
}
|
class ____ extends AbstractSchedulerPlanFollower {
private static final Logger LOG = LoggerFactory
.getLogger(CapacitySchedulerPlanFollower.class);
private CapacityScheduler cs;
@Override
public void init(Clock clock, ResourceScheduler sched, Collection<Plan> plans) {
super.init(clock, sched, plans);
LOG.info("Initializing Plan Follower Policy:"
+ this.getClass().getCanonicalName());
if (!(sched instanceof CapacityScheduler)) {
throw new YarnRuntimeException(
"CapacitySchedulerPlanFollower can only work with CapacityScheduler");
}
this.cs = (CapacityScheduler) sched;
}
@Override
protected Queue getPlanQueue(String planQueueName) {
CSQueue queue = cs.getQueue(planQueueName);
if (!(queue instanceof PlanQueue)) {
LOG.error("The Plan is not an PlanQueue!");
return null;
}
return queue;
}
@Override
protected List<? extends Queue> getChildReservationQueues(Queue queue) {
PlanQueue planQueue = (PlanQueue)queue;
List<CSQueue> childQueues = planQueue.getChildQueues();
return childQueues;
}
@Override
protected void addReservationQueue(
String planQueueName, Queue queue, String currResId) {
PlanQueue planQueue = (PlanQueue)queue;
try {
ReservationQueue resQueue =
new ReservationQueue(cs.getQueueContext(), currResId, planQueue);
cs.addQueue(resQueue);
} catch (SchedulerDynamicEditException e) {
LOG.warn(
"Exception while trying to activate reservation: {} for plan: {}",
currResId, planQueueName, e);
} catch (IOException e) {
LOG.warn(
"Exception while trying to activate reservation: {} for plan: {}",
currResId, planQueueName, e);
}
}
@Override
protected void createDefaultReservationQueue(
String planQueueName, Queue queue, String defReservationId) {
PlanQueue planQueue = (PlanQueue)queue;
if (cs.getQueue(defReservationId) == null) {
try {
ReservationQueue defQueue =
new ReservationQueue(cs.getQueueContext(), defReservationId, planQueue);
cs.addQueue(defQueue);
} catch (SchedulerDynamicEditException e) {
LOG.warn(
"Exception while trying to create default reservation queue for plan: {}",
planQueueName, e);
} catch (IOException e) {
LOG.warn(
"Exception while trying to create default reservation queue for " +
"plan: {}",
planQueueName, e);
}
}
}
@Override
protected Resource getPlanResources(
Plan plan, Queue queue, Resource clusterResources) {
PlanQueue planQueue = (PlanQueue)queue;
float planAbsCap = planQueue.getAbsoluteCapacity();
Resource planResources = Resources.multiply(clusterResources, planAbsCap);
plan.setTotalCapacity(planResources);
return planResources;
}
@Override
protected Resource getReservationQueueResourceIfExists(Plan plan,
ReservationId reservationId) {
CSQueue resQueue = cs.getQueue(reservationId.toString());
Resource reservationResource = null;
if (resQueue != null) {
reservationResource = Resources.multiply(cs.getClusterResource(),
resQueue.getAbsoluteCapacity());
}
return reservationResource;
}
}
|
CapacitySchedulerPlanFollower
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4453PluginVersionFromLifecycleMappingTest.java
|
{
"start": 1040,
"end": 1865
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that plugin versions given by the lifecycle mapping are respected, even if those differ from the version
* defined in the plugin management section inherited from the super POM.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4453");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("process-resources");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent("target/resources-resources.txt");
}
}
|
MavenITmng4453PluginVersionFromLifecycleMappingTest
|
java
|
apache__camel
|
components/camel-activemq/src/main/java/org/apache/camel/component/activemq/converter/ActiveMQConverter.java
|
{
"start": 992,
"end": 1625
}
|
class ____ {
/**
* Converts a URL in ActiveMQ syntax to a destination such as to support "queue://foo.bar" or 'topic://bar.whatnot".
* Things default to queues if no scheme. This allows ActiveMQ destinations to be passed around as Strings and
* converted back again.
*
* @param name is the name of the queue or the full URI using prefixes queue:// or topic://
* @return the ActiveMQ destination
*/
@Converter
public ActiveMQDestination toDestination(String name) {
return ActiveMQDestination.createDestination(name, ActiveMQDestination.QUEUE_TYPE);
}
}
|
ActiveMQConverter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/MemoryTrackingTDigestArrays.java
|
{
"start": 9417,
"end": 11886
}
|
class ____ extends AbstractMemoryTrackingArray implements TDigestLongArray {
static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MemoryTrackingTDigestLongArray.class);
private long[] array;
private int size;
public MemoryTrackingTDigestLongArray(CircuitBreaker breaker, int initialSize) {
this(breaker, new long[initialSize]);
}
public MemoryTrackingTDigestLongArray(CircuitBreaker breaker, long[] array) {
super(breaker);
this.array = array;
this.size = array.length;
}
public static long estimatedRamBytesUsed(int size) {
return SHALLOW_SIZE + estimatedArraySize(size, Long.BYTES);
}
@Override
public long ramBytesUsed() {
return estimatedRamBytesUsed(array.length);
}
@Override
public int size() {
return size;
}
@Override
public long get(int index) {
assert index >= 0 && index < size;
return array[index];
}
@Override
public void set(int index, long value) {
assert index >= 0 && index < size;
array[index] = value;
}
@Override
public void resize(int newSize) {
ensureCapacity(newSize);
if (newSize > size) {
Arrays.fill(array, size, newSize, 0);
}
size = newSize;
}
private void ensureCapacity(int requiredCapacity) {
if (requiredCapacity > array.length) {
long[] oldArray = array;
// Used for used bytes assertion
long oldRamBytesUsed = ramBytesUsed();
long oldArraySize = RamUsageEstimator.sizeOf(oldArray);
int newSize = ArrayUtil.oversize(requiredCapacity, Long.BYTES);
long newArraySize = estimatedArraySize(newSize, Long.BYTES);
breaker.addEstimateBytesAndMaybeBreak(newArraySize, "tdigest-new-capacity-long-array");
array = Arrays.copyOf(array, newSize);
breaker.addWithoutBreaking(-RamUsageEstimator.sizeOf(oldArray));
assert ramBytesUsed() - oldRamBytesUsed == newArraySize - oldArraySize
: "ramBytesUsed() should be aligned with manual array calculations";
}
}
}
public static
|
MemoryTrackingTDigestLongArray
|
java
|
elastic__elasticsearch
|
modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java
|
{
"start": 1115,
"end": 2311
}
|
class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(
new Route(GET, "/_render/template"),
new Route(POST, "/_render/template"),
new Route(GET, "/_render/template/{id}"),
new Route(POST, "/_render/template/{id}")
);
}
@Override
public String getName() {
return "render_search_template_action";
}
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
// Creates the render template request
SearchTemplateRequest renderRequest;
try (XContentParser parser = request.contentOrSourceParamParser()) {
renderRequest = SearchTemplateRequest.fromXContent(parser);
}
renderRequest.setSimulate(true);
String id = request.param("id");
if (id != null) {
renderRequest.setScriptType(ScriptType.STORED);
renderRequest.setScript(id);
}
return channel -> client.execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, renderRequest, new RestToXContentListener<>(channel));
}
}
|
RestRenderSearchTemplateAction
|
java
|
apache__kafka
|
raft/src/main/java/org/apache/kafka/raft/RaftClient.java
|
{
"start": 1463,
"end": 1516
}
|
interface ____<T> extends AutoCloseable {
|
RaftClient
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/cors/reactive/UrlBasedCorsConfigurationSource.java
|
{
"start": 1280,
"end": 2862
}
|
class ____ implements CorsConfigurationSource {
private final PathPatternParser patternParser;
private final Map<PathPattern, CorsConfiguration> corsConfigurations = new LinkedHashMap<>();
/**
* Construct a new {@code UrlBasedCorsConfigurationSource} instance with default
* {@code PathPatternParser}.
* @since 5.0.6
*/
public UrlBasedCorsConfigurationSource() {
this(PathPatternParser.defaultInstance);
}
/**
* Construct a new {@code UrlBasedCorsConfigurationSource} instance from the supplied
* {@code PathPatternParser}.
*/
public UrlBasedCorsConfigurationSource(PathPatternParser patternParser) {
this.patternParser = patternParser;
}
/**
* Set CORS configuration based on URL patterns.
*/
public void setCorsConfigurations(@Nullable Map<String, CorsConfiguration> configMap) {
this.corsConfigurations.clear();
if (configMap != null) {
configMap.forEach(this::registerCorsConfiguration);
}
}
/**
* Register a {@link CorsConfiguration} for the specified path pattern.
*/
public void registerCorsConfiguration(String path, CorsConfiguration config) {
this.corsConfigurations.put(this.patternParser.parse(path), config);
}
@Override
public @Nullable CorsConfiguration getCorsConfiguration(ServerWebExchange exchange) {
PathContainer path = exchange.getRequest().getPath().pathWithinApplication();
for (Map.Entry<PathPattern, CorsConfiguration> entry : this.corsConfigurations.entrySet()) {
if (entry.getKey().matches(path)) {
return entry.getValue();
}
}
return null;
}
}
|
UrlBasedCorsConfigurationSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/registry/classloading/internal/AggregatedServiceLoader.java
|
{
"start": 1780,
"end": 2028
}
|
class ____.
* <p>
* This could result in duplicates, so we take specific care to avoid using the same service provider twice.
* See {@link #getAll()}.
* <p>
* Note that, in the worst case,
* the service retrieved from each individual
|
loader
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/builder/lifecycle/Item.java
|
{
"start": 505,
"end": 746
}
|
class ____ {
private String name;
public Builder name(String name) {
this.name = name;
return this;
}
public Item create() {
return new Item( this );
}
}
}
|
Builder
|
java
|
spring-projects__spring-framework
|
spring-orm/src/test/java/org/springframework/orm/jpa/LocalContainerEntityManagerFactoryBeanTests.java
|
{
"start": 12423,
"end": 13501
}
|
class ____ implements PersistenceProvider {
PersistenceUnitInfo actualPui;
Map actualProps;
@Override
public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo pui, Map map) {
actualPui = pui;
actualProps = map;
return mockEmf;
}
@Override
public EntityManagerFactory createEntityManagerFactory(String emfName, Map properties) {
throw new UnsupportedOperationException();
}
@Override
public EntityManagerFactory createEntityManagerFactory(PersistenceConfiguration persistenceConfiguration) {
throw new UnsupportedOperationException();
}
@Override
public ProviderUtil getProviderUtil() {
throw new UnsupportedOperationException();
}
// JPA 2.1 method
@Override
public void generateSchema(PersistenceUnitInfo persistenceUnitInfo, Map map) {
throw new UnsupportedOperationException();
}
// JPA 2.1 method
@Override
public boolean generateSchema(String persistenceUnitName, Map map) {
throw new UnsupportedOperationException();
}
}
private static
|
DummyContainerPersistenceProvider
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ComponentScanAnnotationIntegrationTests.java
|
{
"start": 15541,
"end": 15762
}
|
interface ____ {
@AliasFor(annotation = ComponentScan.class)
String[] basePackages() default {};
}
@Configuration
@ComponentScan
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@
|
ComposedConfiguration
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-spring/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/spring/RequestBodyArgumentResolver.java
|
{
"start": 1629,
"end": 3471
}
|
class ____ extends AbstractSpringArgumentResolver {
@Override
public Class<Annotation> accept() {
return Annotations.RequestBody.type();
}
@Override
protected ParamType getParamType(NamedValueMeta meta) {
return ParamType.Body;
}
@Override
protected NamedValueMeta createNamedValueMeta(ParameterMeta param, AnnotationMeta<Annotation> anno) {
return new NamedValueMeta(null, Helper.isRequired(anno));
}
@Override
protected Object resolveValue(NamedValueMeta meta, HttpRequest request, HttpResponse response) {
if (RequestUtils.isFormOrMultiPart(request)) {
if (meta.parameter().isSimple()) {
return request.formParameter(meta.name());
}
return meta.parameter().bind(request, response);
}
return RequestUtils.decodeBody(request, meta.genericType());
}
@Override
protected Object resolveCollectionValue(NamedValueMeta meta, HttpRequest request, HttpResponse response) {
Class<?> type = meta.type();
if (type == byte[].class) {
try {
return StreamUtils.readBytes(request.inputStream());
} catch (IOException e) {
throw new RestException(e);
}
}
if (RequestUtils.isFormOrMultiPart(request)) {
return request.formParameterValues(meta.name());
}
return RequestUtils.decodeBody(request, meta.genericType());
}
@Override
protected Object resolveMapValue(NamedValueMeta meta, HttpRequest request, HttpResponse response) {
if (RequestUtils.isFormOrMultiPart(request)) {
return RequestUtils.getFormParametersMap(request);
}
return RequestUtils.decodeBody(request, meta.genericType());
}
}
|
RequestBodyArgumentResolver
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DictionaryCompoundWordTokenFilterFactory.java
|
{
"start": 1005,
"end": 1610
}
|
class ____ extends AbstractCompoundWordTokenFilterFactory {
DictionaryCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, env, name, settings);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new DictionaryCompoundWordTokenFilter(
tokenStream,
wordList,
minWordSize,
minSubwordSize,
maxSubwordSize,
onlyLongestMatch,
reuseChars
);
}
}
|
DictionaryCompoundWordTokenFilterFactory
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/zookeeper/ZooKeeperExtension.java
|
{
"start": 1794,
"end": 4212
}
|
class ____ implements CustomExtension {
private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperExtension.class);
@Nullable private TestingServer zooKeeperServer;
@Nullable private CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper;
@Override
public void before(ExtensionContext context) throws Exception {
close();
zooKeeperServer = ZooKeeperTestUtils.createAndStartZookeeperTestingServer();
}
@Override
public void after(ExtensionContext context) throws Exception {
try {
close();
} catch (IOException e) {
LOG.warn("Could not properly terminate the {}.", getClass().getSimpleName(), e);
}
}
public void close() throws IOException {
terminateCuratorFrameworkWrapper();
terminateZooKeeperServer();
}
private void terminateCuratorFrameworkWrapper() {
if (curatorFrameworkWrapper != null) {
curatorFrameworkWrapper.close();
curatorFrameworkWrapper = null;
}
}
private void terminateZooKeeperServer() throws IOException {
if (zooKeeperServer != null) {
zooKeeperServer.close();
zooKeeperServer = null;
}
}
public String getConnectString() {
return getRunningZookeeperInstanceOrFail().getConnectString();
}
private TestingServer getRunningZookeeperInstanceOrFail() {
Preconditions.checkState(zooKeeperServer != null);
return zooKeeperServer;
}
public CuratorFramework getZooKeeperClient(FatalErrorHandler fatalErrorHandler) {
if (curatorFrameworkWrapper == null) {
curatorFrameworkWrapper = createCuratorFramework(fatalErrorHandler);
}
return curatorFrameworkWrapper.asCuratorFramework();
}
private CuratorFrameworkWithUnhandledErrorListener createCuratorFramework(
FatalErrorHandler fatalErrorHandler) {
final Configuration config = new Configuration();
config.set(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, getConnectString());
return ZooKeeperUtils.startCuratorFramework(config, fatalErrorHandler);
}
public void restart() throws Exception {
getRunningZookeeperInstanceOrFail().restart();
}
public void stop() throws IOException {
getRunningZookeeperInstanceOrFail().stop();
}
}
|
ZooKeeperExtension
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/writing/DependencyMethodProducerCreationExpression.java
|
{
"start": 3959,
"end": 5326
}
|
class ____ with
// DependencyMethodProviderCreationExpression, we wouldn't need to deal with
// this and might be able to avoid potentially creating an extra field in
// the component?
componentImplementation.name().nestedClass("Anonymous")))
.build();
// TODO(b/70395982): Explore using a private static type instead of an anonymous class.
XTypeName keyType = binding.key().type().xprocessing().asTypeName();
return XCodeBlock.of(
"%L",
XTypeSpec.anonymousClassBuilder("")
.superclass(dependencyMethodProducerOf(keyType))
.addProperty(dependencyProperty)
.addFunction(
XFunSpec.builder(
/* name= */ "callDependencyMethod",
/* visibility= */ VisibilityModifier.PUBLIC,
/* isOpen= */ false,
/* isOverride= */ true,
/* addJavaNullabilityAnnotation= */ false)
.returns(listenableFutureOf(keyType))
.addStatement(
"return %N.%N()",
dependencyProperty, asMethod(binding.bindingElement().get()).getJvmName())
.build())
.build());
}
@AssistedFactory
static
|
as
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/fetching/FetchModeSelectTest.java
|
{
"start": 2374,
"end": 3040
}
|
class ____ {
@Id
private Long id;
@OneToMany(mappedBy = "department", fetch = FetchType.LAZY)
@Fetch(FetchMode.SELECT)
private List<Employee> employees = new ArrayList<>();
//Getters and setters omitted for brevity
//end::fetching-strategies-fetch-mode-select-mapping-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public List<Employee> getEmployees() {
return employees;
}
public void setEmployees(List<Employee> employees) {
this.employees = employees;
}
//tag::fetching-strategies-fetch-mode-select-mapping-example[]
}
@Entity(name = "Employee")
public static
|
Department
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java
|
{
"start": 102119,
"end": 102533
}
|
class ____ implements Procedure {
public Integer[] call(Object procedureContext, int n) {
return null;
}
public Integer[] call(Object procedureContext, long n) {
return null;
}
public Integer[] call(Object procedureContext, @DataTypeHint("DECIMAL(10, 2)") Object n) {
return null;
}
}
private static
|
NamedArgumentsProcedure
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_865/ProjCoreUserEntity.java
|
{
"start": 233,
"end": 461
}
|
class ____ {
private String memberName;
public String getMemberName() {
return memberName;
}
public void setMemberName(String memberName) {
this.memberName = memberName;
}
}
|
ProjCoreUserEntity
|
java
|
apache__flink
|
flink-datastream/src/main/java/org/apache/flink/datastream/impl/extension/window/function/InternalTwoInputWindowStreamProcessFunction.java
|
{
"start": 1712,
"end": 2086
}
|
class ____ wrap a {@link TwoInputNonBroadcastWindowStreamProcessFunction} to process function.
* This will be translated to a window operator instead of vanilla process operator.
*
* @param <IN1> Type of the first input elements.
* @param <IN2> Type of the second input elements.
* @param <OUT> Type of the output elements.
* @param <W> Type of the window.
*/
public
|
that
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/teradata/ast/TDNormalize.java
|
{
"start": 229,
"end": 1145
}
|
class ____ extends SQLObjectImpl implements TDObject {
private boolean meets;
private boolean overlaps;
private boolean meetsFirst;
public boolean isMeets() {
return meets;
}
public void setMeets(boolean meets) {
this.meets = meets;
}
public boolean isOverlaps() {
return overlaps;
}
public void setOverlaps(boolean overlaps) {
this.overlaps = overlaps;
}
public boolean isMeetsFirst() {
return meetsFirst;
}
public void setMeetsFirst(boolean meetsFirst) {
this.meetsFirst = meetsFirst;
}
@Override
public void accept0(SQLASTVisitor v) {
if (v instanceof TDASTVisitor) {
accept0((TDASTVisitor) v);
}
}
@Override
public void accept0(TDASTVisitor visitor) {
if (visitor.visit(this)) {
visitor.endVisit(this);
}
}
}
|
TDNormalize
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/http/Http2HAProxyTest.java
|
{
"start": 619,
"end": 959
}
|
class ____ extends HAProxyTest {
@Override
protected HttpServerOptions createBaseServerOptions() {
return Http2TestBase.createHttp2ServerOptions(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST);
}
@Override
protected HttpClientOptions createBaseClientOptions() {
return Http2TestBase.createHttp2ClientOptions();
}
}
|
Http2HAProxyTest
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/jsontype/impl/AsWrapperTypeDeserializer.java
|
{
"start": 699,
"end": 4886
}
|
class ____
extends TypeDeserializerBase
{
public AsWrapperTypeDeserializer(JavaType bt, TypeIdResolver idRes,
String typePropertyName, boolean typeIdVisible, JavaType defaultImpl)
{
super(bt, idRes, typePropertyName, typeIdVisible, defaultImpl);
}
protected AsWrapperTypeDeserializer(AsWrapperTypeDeserializer src, BeanProperty property) {
super(src, property);
}
@Override
public TypeDeserializer forProperty(BeanProperty prop) {
return (prop == _property) ? this : new AsWrapperTypeDeserializer(this, prop);
}
@Override
public As getTypeInclusion() { return As.WRAPPER_OBJECT; }
/**
* Deserializing type id enclosed using WRAPPER_OBJECT style is straightforward
*/
@Override
public Object deserializeTypedFromObject(JsonParser jp, DeserializationContext ctxt) throws JacksonException {
return _deserialize(jp, ctxt);
}
@Override
public Object deserializeTypedFromArray(JsonParser jp, DeserializationContext ctxt) throws JacksonException {
return _deserialize(jp, ctxt);
}
@Override
public Object deserializeTypedFromScalar(JsonParser jp, DeserializationContext ctxt) throws JacksonException {
return _deserialize(jp, ctxt);
}
@Override
public Object deserializeTypedFromAny(JsonParser jp, DeserializationContext ctxt) throws JacksonException {
return _deserialize(jp, ctxt);
}
/*
/***************************************************************
/* Internal methods
/***************************************************************
*/
/**
* Method that handles type information wrapper, locates actual
* subtype deserializer to use, and calls it to do actual
* deserialization.
*/
@SuppressWarnings("resource")
protected Object _deserialize(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
// 02-Aug-2013, tatu: May need to use native type ids
if (p.canReadTypeId()) {
Object typeId = p.getTypeId();
if (typeId != null) {
return _deserializeWithNativeTypeId(p, ctxt, typeId);
}
}
// first, sanity checks
JsonToken t = p.currentToken();
if (t == JsonToken.START_OBJECT) {
// should always get field name, but just in case...
if (p.nextToken() != JsonToken.PROPERTY_NAME) {
ctxt.reportWrongTokenException(baseType(), JsonToken.PROPERTY_NAME,
"need JSON String that contains type id (for subtype of "+baseTypeName()+")");
}
} else if (t != JsonToken.PROPERTY_NAME) {
ctxt.reportWrongTokenException(baseType(), JsonToken.START_OBJECT,
"need JSON Object to contain As.WRAPPER_OBJECT type information for class "+baseTypeName());
}
final String typeId = p.getString();
ValueDeserializer<Object> deser = _findDeserializer(ctxt, typeId);
p.nextToken();
// Minor complication: we may need to merge type id in?
if (_typeIdVisible && p.isExpectedStartObjectToken()) {
// but what if there's nowhere to add it in? Error? Or skip? For now, skip.
TokenBuffer tb = ctxt.bufferForInputBuffering(p);
tb.writeStartObject(); // recreate START_OBJECT
tb.writeName(_typePropertyName);
tb.writeString(typeId);
// 02-Jul-2016, tatu: Depending on for JsonParserSequence is initialized it may
// try to access current token; ensure there isn't one
p.clearCurrentToken();
p = JsonParserSequence.createFlattened(false, tb.asParser(ctxt, p), p);
p.nextToken();
}
Object value = deser.deserialize(p, ctxt);
// And then need the closing END_OBJECT
if (p.nextToken() != JsonToken.END_OBJECT) {
ctxt.reportWrongTokenException(baseType(), JsonToken.END_OBJECT,
"expected closing END_OBJECT after type information and deserialized value");
}
return value;
}
}
|
AsWrapperTypeDeserializer
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/jackson/ThrowableProxyWithoutStacktraceMixIn.java
|
{
"start": 1308,
"end": 2765
}
|
class ____ {
@JsonProperty(JsonConstants.ELT_CAUSE)
@JacksonXmlProperty(namespace = XmlConstants.XML_NAMESPACE, localName = XmlConstants.ELT_CAUSE)
private ThrowableProxyWithoutStacktraceMixIn causeProxy;
@JsonProperty
@JacksonXmlProperty(isAttribute = true)
private int commonElementCount;
@JsonIgnore
private ExtendedStackTraceElement[] extendedStackTrace;
@JsonProperty
@JacksonXmlProperty(isAttribute = true)
private String localizedMessage;
@JsonProperty
@JacksonXmlProperty(isAttribute = true)
private String message;
@JsonProperty
@JacksonXmlProperty(isAttribute = true)
private String name;
@JsonIgnore
private transient Throwable throwable;
@JsonIgnore
public abstract String getCauseStackTraceAsString();
@JsonIgnore
public abstract String getExtendedStackTraceAsString();
@JsonIgnore
public abstract StackTraceElement[] getStackTrace();
@JsonProperty(JsonConstants.ELT_SUPPRESSED)
@JacksonXmlElementWrapper(namespace = XmlConstants.XML_NAMESPACE, localName = XmlConstants.ELT_SUPPRESSED)
@JacksonXmlProperty(namespace = XmlConstants.XML_NAMESPACE, localName = XmlConstants.ELT_SUPPRESSED_ITEM)
public abstract ThrowableProxy[] getSuppressedProxies();
@JsonIgnore
public abstract String getSuppressedStackTrace();
@JsonIgnore
public abstract Throwable getThrowable();
}
|
ThrowableProxyWithoutStacktraceMixIn
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableReplay.java
|
{
"start": 18737,
"end": 19624
}
|
interface ____<T> {
/**
* Adds a regular value to the buffer.
* @param value the value to be stored in the buffer
*/
void next(T value);
/**
* Adds a terminal exception to the buffer.
* @param e the error to be stored in the buffer
*/
void error(Throwable e);
/**
* Adds a completion event to the buffer.
*/
void complete();
/**
* Tries to replay the buffered values to the
* subscriber inside the output if there
* is new value and requests available at the
* same time.
* @param output the receiver of the buffered events
*/
void replay(InnerDisposable<T> output);
}
/**
* Holds an unbounded list of events.
*
* @param <T> the value type
*/
static final
|
ReplayBuffer
|
java
|
apache__camel
|
core/camel-xml-jaxp/src/main/java/org/apache/camel/converter/jaxp/XmlConverter.java
|
{
"start": 10771,
"end": 11301
}
|
class ____ add new kinds of conversion).
*/
@Converter(order = 14)
public DOMSource toDOMSource(byte[] bytes) throws IOException, SAXException, ParserConfigurationException {
InputStream is = new ByteArrayInputStream(bytes);
try {
return toDOMSource(is, null);
} finally {
IOHelper.close(is);
}
}
/**
* Converts the source instance to a {@link SAXSource} or returns null if the conversion is not supported (making it
* easy to derive from this
|
to
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy-mutiny-common/runtime/src/test/java/io/quarkus/resteasy/mutiny/common/test/MultiProviderTest.java
|
{
"start": 342,
"end": 898
}
|
class ____ {
private final MultiProvider provider = new MultiProvider();
@Test
public void test() {
Multi<?> multi = Multi.createFrom().items(1, 2, 3);
Publisher<?> publisher = provider.toAsyncStream(multi);
List<?> list = Multi.createFrom().publisher(AdaptersToFlow.publisher(publisher)).collect().asList().await()
.indefinitely();
Assertions.assertEquals(1, list.get(0));
Assertions.assertEquals(2, list.get(1));
Assertions.assertEquals(3, list.get(2));
}
}
|
MultiProviderTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
|
{
"start": 13051,
"end": 13951
}
|
interface ____ {
/**
* Called for each cached bitset on the cache event.
* @param shardId the shard id the bitset was cached for. This can be <code>null</code>
* @param accountable the bitsets ram representation
*/
void onCache(ShardId shardId, Accountable accountable);
/**
* Called for each cached bitset on the removal event.
* @param shardId the shard id the bitset was cached for. This can be <code>null</code>
* @param accountable the bitsets ram representation
*/
void onRemoval(ShardId shardId, Accountable accountable);
Listener NOOP = new Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {}
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {}
};
}
}
|
Listener
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/map/MapAssert_contains_Test.java
|
{
"start": 1221,
"end": 2094
}
|
class ____ extends MapAssertBaseTest {
final MapEntry<String, String>[] entries = array(entry("key1", "value1"), entry("key2", "value2"));
@Override
protected MapAssert<Object, Object> invoke_api_method() {
return assertions.contains(entry("key1", "value1"), entry("key2", "value2"));
}
@Override
protected void verify_internal_effects() {
verify(maps).assertContains(getInfo(assertions), getActual(assertions), entries, null);
}
@Test
void invoke_api_like_user() {
assertThat(map("key1", "value1", "key2", "value2")).contains(entry("key2", "value2"));
}
@Test
void should_honor_custom_value_equals_when_comparing_entry_values() {
// GIVEN
var map = Map.of("key", "VALUE");
// WHEN/THEN
then(map).usingEqualsForValues(String::equalsIgnoreCase)
.contains(entry("key", "value"));
}
}
|
MapAssert_contains_Test
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/state/v2/AggregatingStateDescriptor.java
|
{
"start": 1660,
"end": 4110
}
|
class ____<IN, ACC, OUT> extends StateDescriptor<ACC> {
private final AggregateFunction<IN, ACC, OUT> aggregateFunction;
/**
* Create a new {@code AggregatingStateDescriptor} with the given name, function, and type.
*
* @param stateId The (unique) name for the state.
* @param aggregateFunction The {@code AggregateFunction} used to aggregate the state.
* @param typeInfo The type of the accumulator. The accumulator is stored in the state.
*/
public AggregatingStateDescriptor(
@Nonnull String stateId,
@Nonnull AggregateFunction<IN, ACC, OUT> aggregateFunction,
@Nonnull TypeInformation<ACC> typeInfo) {
super(stateId, typeInfo);
this.aggregateFunction = checkNotNull(aggregateFunction);
}
/**
* Create a new {@code AggregatingStateDescriptor} with the given stateId and the given type
* serializer.
*
* @param stateId The (unique) stateId for the state.
* @param serializer The type serializer for accumulator.
*/
public AggregatingStateDescriptor(
@Nonnull String stateId,
@Nonnull AggregateFunction<IN, ACC, OUT> aggregateFunction,
@Nonnull TypeSerializer<ACC> serializer) {
super(stateId, serializer);
this.aggregateFunction = checkNotNull(aggregateFunction);
}
/**
* Creates a new {@code AggregatingStateDescriptor} with the given name, function, and type.
*
* <p>If this constructor fails (because it is not possible to describe the type via a class),
* consider using the {@link #AggregatingStateDescriptor(String, AggregateFunction,
* TypeInformation)} constructor.
*
* @param name The (unique) name for the state.
* @param aggFunction The {@code AggregateFunction} used to aggregate the state.
* @param stateType The type of the accumulator. The accumulator is stored in the state.
*/
public AggregatingStateDescriptor(
String name, AggregateFunction<IN, ACC, OUT> aggFunction, Class<ACC> stateType) {
super(name, stateType);
this.aggregateFunction = checkNotNull(aggFunction);
}
/** Returns the Aggregate function for this state. */
public AggregateFunction<IN, ACC, OUT> getAggregateFunction() {
return aggregateFunction;
}
@Override
public Type getType() {
return Type.AGGREGATING;
}
}
|
AggregatingStateDescriptor
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CouchDbEndpointBuilderFactory.java
|
{
"start": 25400,
"end": 27800
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final CouchDbHeaderNameBuilder INSTANCE = new CouchDbHeaderNameBuilder();
/**
* The database the message came from.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code CouchDbDatabase}.
*/
public String couchDbDatabase() {
return "CouchDbDatabase";
}
/**
* The couchdb changeset sequence number of the update / delete message.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code CouchDbSeq}.
*/
public String couchDbSeq() {
return "CouchDbSeq";
}
/**
* The couchdb document id.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code CouchDbId}.
*/
public String couchDbId() {
return "CouchDbId";
}
/**
* The couchdb document revision.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code CouchDbRev}.
*/
public String couchDbRev() {
return "CouchDbRev";
}
/**
* The method (delete / update).
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code CouchDbMethod}.
*/
public String couchDbMethod() {
return "CouchDbMethod";
}
/**
* The resume action to execute when resuming.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code CouchDbResumeAction}.
*/
public String couchDbResumeAction() {
return "CamelCouchDbResumeAction";
}
}
static CouchDbEndpointBuilder endpointBuilder(String componentName, String path) {
|
CouchDbHeaderNameBuilder
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/processors/MulticastProcessor.java
|
{
"start": 7219,
"end": 21402
}
|
class ____<@NonNull T> extends FlowableProcessor<T> {
    // serialization counter for drain(): non-zero means a drain pass is already running
    final AtomicInteger wip;
    // the single upstream Subscription; set at most once
    final AtomicReference<Subscription> upstream;
    // current array of subscribers; EMPTY when none, TERMINATED once the processor shuts down
    final AtomicReference<MulticastSubscription<T>[]> subscribers;
    // prefetch amount requested from the upstream
    final int bufferSize;
    // replenish threshold: bufferSize - bufferSize/4, i.e. request more after 75% consumed
    final int limit;
    // if true, cancel the upstream when the last subscriber leaves (see remove())
    final boolean refcount;
    // item buffer: either a locally created SPSC queue or the fused upstream queue
    volatile SimpleQueue<T> queue;
    // terminal-state flag; error (below) holds the terminal Throwable, if any
    volatile boolean done;
    volatile Throwable error;
    // number of items emitted since the last upstream replenishment request
    int consumed;
    // fusion mode negotiated in onSubscribe: NONE, SYNC or ASYNC
    int fusionMode;
    @SuppressWarnings("rawtypes")
    static final MulticastSubscription[] EMPTY = new MulticastSubscription[0];
    @SuppressWarnings("rawtypes")
    static final MulticastSubscription[] TERMINATED = new MulticastSubscription[0];
/**
* Constructs a fresh instance with the default Flowable.bufferSize() prefetch
* amount and no refCount-behavior.
* @param <T> the input and output value type
* @return the new MulticastProcessor instance
*/
@CheckReturnValue
@NonNull
public static <T> MulticastProcessor<T> create() {
return new MulticastProcessor<>(bufferSize(), false);
}
/**
* Constructs a fresh instance with the default Flowable.bufferSize() prefetch
* amount and the optional refCount-behavior.
* @param <T> the input and output value type
* @param refCount if true and if all Subscribers have canceled, the upstream
* is cancelled
* @return the new MulticastProcessor instance
*/
@CheckReturnValue
@NonNull
public static <T> MulticastProcessor<T> create(boolean refCount) {
return new MulticastProcessor<>(bufferSize(), refCount);
}
/**
* Constructs a fresh instance with the given prefetch amount and no refCount behavior.
* @param bufferSize the prefetch amount
* @param <T> the input and output value type
* @return the new MulticastProcessor instance
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
*/
@CheckReturnValue
@NonNull
public static <T> MulticastProcessor<T> create(int bufferSize) {
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return new MulticastProcessor<>(bufferSize, false);
}
/**
* Constructs a fresh instance with the given prefetch amount and the optional
* refCount-behavior.
* @param bufferSize the prefetch amount
* @param refCount if true and if all Subscribers have canceled, the upstream
* is cancelled
* @param <T> the input and output value type
* @return the new MulticastProcessor instance
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
*/
@CheckReturnValue
@NonNull
public static <T> MulticastProcessor<T> create(int bufferSize, boolean refCount) {
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return new MulticastProcessor<>(bufferSize, refCount);
}
    /**
     * Constructs a fresh instance with the given prefetch amount and the optional
     * refCount-behavior.
     * @param bufferSize the prefetch amount
     * @param refCount if true and if all Subscribers have canceled, the upstream
     * is cancelled
     */
    @SuppressWarnings("unchecked")
    MulticastProcessor(int bufferSize, boolean refCount) {
        this.bufferSize = bufferSize;
        // replenish threshold: request more from upstream once 75% of the prefetch is consumed
        this.limit = bufferSize - (bufferSize >> 2);
        this.wip = new AtomicInteger();
        this.subscribers = new AtomicReference<>(EMPTY);
        this.upstream = new AtomicReference<>();
        this.refcount = refCount;
    }
/**
* Initializes this Processor by setting an upstream Subscription that
* ignores request amounts, uses a fixed buffer
* and allows using the onXXX and offer methods
* afterwards.
*/
public void start() {
if (SubscriptionHelper.setOnce(upstream, EmptySubscription.INSTANCE)) {
queue = new SpscArrayQueue<>(bufferSize);
}
}
/**
* Initializes this Processor by setting an upstream Subscription that
* ignores request amounts, uses an unbounded buffer
* and allows using the onXXX and offer methods
* afterwards.
*/
public void startUnbounded() {
if (SubscriptionHelper.setOnce(upstream, EmptySubscription.INSTANCE)) {
queue = new SpscLinkedArrayQueue<>(bufferSize);
}
}
    // Accepts the upstream Subscription (at most once) and negotiates queue fusion:
    // in SYNC mode the upstream queue is drained directly and is considered complete;
    // in ASYNC mode the upstream queue is adopted and items are signalled via onNext;
    // otherwise a local SPSC buffer is created and bufferSize items are prefetched.
    @Override
    public void onSubscribe(@NonNull Subscription s) {
        if (SubscriptionHelper.setOnce(upstream, s)) {
            if (s instanceof QueueSubscription) {
                @SuppressWarnings("unchecked")
                QueueSubscription<T> qs = (QueueSubscription<T>)s;
                int m = qs.requestFusion(QueueSubscription.ANY);
                if (m == QueueSubscription.SYNC) {
                    fusionMode = m;
                    queue = qs;
                    // SYNC fusion: all items are already available; mark done and drain
                    done = true;
                    drain();
                    return;
                }
                if (m == QueueSubscription.ASYNC) {
                    fusionMode = m;
                    queue = qs;
                    // ASYNC fusion: reuse the upstream queue, just request the prefetch
                    s.request(bufferSize);
                    return;
                }
            }
            // no fusion: buffer locally and prefetch bufferSize items
            queue = new SpscArrayQueue<>(bufferSize);
            s.request(bufferSize);
        }
    }
    // Enqueues an item (non-fused mode) or merely triggers a drain (fused modes,
    // where the upstream has already placed the item into the shared queue).
    @Override
    public void onNext(@NonNull T t) {
        if (done) {
            return;
        }
        if (fusionMode == QueueSubscription.NONE) {
            ExceptionHelper.nullCheck(t, "onNext called with a null value.");
            if (!queue.offer(t)) {
                // prefetch contract violated by the upstream: cancel and signal MBE
                SubscriptionHelper.cancel(upstream);
                onError(MissingBackpressureException.createDefault());
                return;
            }
        }
        drain();
    }
/**
* Tries to offer an item into the internal queue and returns false
* if the queue is full.
* @param t the item to offer, not {@code null}
* @return true if successful, false if the queue is full
* @throws NullPointerException if {@code t} is {@code null}
* @throws IllegalStateException if the processor is in fusion mode
*/
@CheckReturnValue
public boolean offer(@NonNull T t) {
ExceptionHelper.nullCheck(t, "offer called with a null value.");
if (done) {
return false;
}
if (fusionMode == QueueSubscription.NONE) {
if (queue.offer(t)) {
drain();
return true;
}
return false;
}
throw new IllegalStateException("offer() should not be called in fusion mode!");
}
@Override
public void onError(@NonNull Throwable t) {
ExceptionHelper.nullCheck(t, "onError called with a null Throwable.");
if (!done) {
error = t;
done = true;
drain();
return;
}
RxJavaPlugins.onError(t);
}
    @Override
    public void onComplete() {
        // mark terminal state; drain() emits the completion once the queue is empty
        done = true;
        drain();
    }
@Override
@CheckReturnValue
public boolean hasSubscribers() {
return subscribers.get().length != 0;
}
@Override
@CheckReturnValue
public boolean hasThrowable() {
return done && error != null;
}
@Override
@CheckReturnValue
public boolean hasComplete() {
return done && error == null;
}
@Override
@CheckReturnValue
public Throwable getThrowable() {
return done ? error : null;
}
    // Registers a new Subscriber; if the processor has already terminated,
    // replays the terminal event immediately instead.
    @Override
    protected void subscribeActual(@NonNull Subscriber<? super T> s) {
        MulticastSubscription<T> ms = new MulticastSubscription<>(s, this);
        s.onSubscribe(ms);
        if (add(ms)) {
            // Long.MIN_VALUE appears to be the cancelled marker of
            // MulticastSubscription — TODO confirm against that inner class
            if (ms.get() == Long.MIN_VALUE) {
                remove(ms);
            } else {
                drain();
            }
        } else {
            // add() failed: subscriber set is TERMINATED, so replay the terminal event
            if (done) {
                Throwable ex = error;
                if (ex != null) {
                    s.onError(ex);
                    return;
                }
            }
            s.onComplete();
        }
    }
boolean add(MulticastSubscription<T> inner) {
for (;;) {
MulticastSubscription<T>[] a = subscribers.get();
if (a == TERMINATED) {
return false;
}
int n = a.length;
@SuppressWarnings("unchecked")
MulticastSubscription<T>[] b = new MulticastSubscription[n + 1];
System.arraycopy(a, 0, b, 0, n);
b[n] = inner;
if (subscribers.compareAndSet(a, b)) {
return true;
}
}
}
    // Atomically removes a subscription from the array via copy-on-write CAS.
    // When the last subscriber leaves and refcount is enabled, the whole
    // processor terminates and the upstream is cancelled.
    @SuppressWarnings("unchecked")
    void remove(MulticastSubscription<T> inner) {
        for (;;) {
            MulticastSubscription<T>[] a = subscribers.get();
            int n = a.length;
            if (n == 0) {
                return;
            }
            // locate the subscription; it may have been removed concurrently
            int j = -1;
            for (int i = 0; i < n; i++) {
                if (a[i] == inner) {
                    j = i;
                    break;
                }
            }
            if (j < 0) {
                break;
            }
            if (n == 1) {
                // removing the last subscriber
                if (refcount) {
                    // refCount semantics: shut the processor down with the last subscriber
                    if (subscribers.compareAndSet(a, TERMINATED)) {
                        SubscriptionHelper.cancel(upstream);
                        done = true;
                        break;
                    }
                } else {
                    if (subscribers.compareAndSet(a, EMPTY)) {
                        break;
                    }
                }
            } else {
                // copy the array without element j
                MulticastSubscription<T>[] b = new MulticastSubscription[n - 1];
                System.arraycopy(a, 0, b, 0, j);
                System.arraycopy(a, j + 1, b, j, n - j - 1);
                if (subscribers.compareAndSet(a, b)) {
                    break;
                }
            }
        }
    }
    // Serialized emission loop: emits queued items to all subscribers in lockstep,
    // bounded by the smallest outstanding request, dispatches terminal events,
    // and replenishes the upstream prefetch. Only one thread runs the loop at a
    // time; wip counts the missed drain attempts to re-run.
    @SuppressWarnings("unchecked")
    void drain() {
        // serialization: if another thread is draining, it will pick up this signal
        if (wip.getAndIncrement() != 0) {
            return;
        }
        int missed = 1;
        AtomicReference<MulticastSubscription<T>[]> subs = subscribers;
        int c = consumed;
        int lim = limit;
        int fm = fusionMode;
        outer:
        for (;;) {
            SimpleQueue<T> q = queue;
            // queue may still be null before onSubscribe/start initialized it
            if (q != null) {
                MulticastSubscription<T>[] as = subs.get();
                int n = as.length;
                if (n != 0) {
                    // compute the minimum outstanding demand across all subscribers;
                    // r stays -1 if none has a non-negative requested amount
                    // (negative values presumably mark cancelled subscriptions — confirm
                    // against MulticastSubscription)
                    long r = -1L;
                    for (MulticastSubscription<T> a : as) {
                        long ra = a.get();
                        if (ra >= 0L) {
                            if (r == -1L) {
                                r = ra - a.emitted;
                            } else {
                                r = Math.min(r, ra - a.emitted);
                            }
                        }
                    }
                    while (r > 0L) {
                        MulticastSubscription<T>[] bs = subs.get();
                        if (bs == TERMINATED) {
                            q.clear();
                            return;
                        }
                        // the subscriber set changed mid-emission: recompute demand
                        if (as != bs) {
                            continue outer;
                        }
                        boolean d = done;
                        T v;
                        try {
                            v = q.poll();
                        } catch (Throwable ex) {
                            // a failing fused poll terminates the whole processor
                            Exceptions.throwIfFatal(ex);
                            SubscriptionHelper.cancel(upstream);
                            d = true;
                            v = null;
                            error = ex;
                            done = true;
                        }
                        boolean empty = v == null;
                        if (d && empty) {
                            // terminal state and queue drained: dispatch the terminal event
                            Throwable ex = error;
                            if (ex != null) {
                                for (MulticastSubscription<T> inner : subs.getAndSet(TERMINATED)) {
                                    inner.onError(ex);
                                }
                            } else {
                                for (MulticastSubscription<T> inner : subs.getAndSet(TERMINATED)) {
                                    inner.onComplete();
                                }
                            }
                            return;
                        }
                        if (empty) {
                            break;
                        }
                        // emit to every subscriber in lockstep
                        for (MulticastSubscription<T> inner : as) {
                            inner.onNext(v);
                        }
                        r--;
                        // replenish the upstream prefetch every `lim` items,
                        // except in SYNC fusion where no requests are needed
                        if (fm != QueueSubscription.SYNC) {
                            if (++c == lim) {
                                c = 0;
                                upstream.get().request(lim);
                            }
                        }
                    }
                    if (r == 0) {
                        // no demand left: still check for termination with an empty queue
                        MulticastSubscription<T>[] bs = subs.get();
                        if (bs == TERMINATED) {
                            q.clear();
                            return;
                        }
                        if (as != bs) {
                            continue;
                        }
                        if (done && q.isEmpty()) {
                            Throwable ex = error;
                            if (ex != null) {
                                for (MulticastSubscription<T> inner : subs.getAndSet(TERMINATED)) {
                                    inner.onError(ex);
                                }
                            } else {
                                for (MulticastSubscription<T> inner : subs.getAndSet(TERMINATED)) {
                                    inner.onComplete();
                                }
                            }
                            return;
                        }
                    }
                }
            }
            consumed = c;
            missed = wip.addAndGet(-missed);
            if (missed == 0) {
                break;
            }
        }
    }
static final
|
MulticastProcessor
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileEndpoint.java
|
{
"start": 2208,
"end": 108463
}
|
class ____<T> extends ScheduledPollEndpoint implements BrowsableEndpoint {
protected static final int DEFAULT_IDEMPOTENT_CACHE_SIZE = 1000;
protected static final int DEFAULT_IN_PROGRESS_CACHE_SIZE = 50000;
private static final Logger LOG = LoggerFactory.getLogger(GenericFileEndpoint.class);
// common options
@UriParam(label = "advanced", defaultValue = "true", description = "Automatically create missing directories in "
+ "the file's pathname. For the file consumer, that means creating the starting directory. For the file "
+ "producer, it means the directory the files should be written to.")
protected boolean autoCreate = true;
@UriParam(label = "advanced", defaultValue = "" + FileUtil.BUFFER_SIZE, description = "Buffer size in bytes used "
+ "for writing files (or in case of FTP for downloading and uploading files).")
protected int bufferSize = FileUtil.BUFFER_SIZE;
@UriParam(description = "This option is used to specify the encoding of the file. You can use this on the "
+ "consumer, to specify the encodings of the files, which allow Camel to know the charset it should load "
+ "the file content in case the file content is being accessed. Likewise when writing a file, you can use "
+ "this option to specify which charset to write the file as well. Do mind that when writing the file "
+ "Camel may have to read the message content into memory to be able to convert the data into the "
+ "configured charset, so do not use this if you have big messages.")
protected String charset;
@UriParam(javaType = "java.lang.String", description = "Use Expression such as File Language to dynamically set "
+ "the filename. For consumers, it's used as a filename filter. For producers, it's used to evaluate the "
+ "filename to write. If an expression is set, it take precedence over the CamelFileName header. (Note: "
+ "The header itself can also be an Expression). The expression options support both String and Expression "
+ "types. If the expression is a String type, it is always evaluated using the File Language. If the "
+ "expression is an Expression type, the specified Expression type is used - this allows you, for "
+ "instance, to use OGNL expressions. For the consumer, you can use it to filter filenames, so you can "
+ "for instance consume today's file using the File Language syntax: mydata-${date:now:yyyyMMdd}.txt. The "
+ "producers support the CamelOverruleFileName header which takes precedence over any existing "
+ "CamelFileName header; the CamelOverruleFileName is a header that is used only once, and makes it easier "
+ "as this avoids to temporary store CamelFileName and have to restore it afterwards.")
protected Expression fileName;
@UriParam(description = "Producer: If provided, then Camel will write a 2nd done file when the original file has "
+ "been written. The done file will be empty. This option configures what file name to use. Either you can "
+ "specify a fixed name. Or you can use dynamic placeholders. The done file will always be written in the "
+ "same folder as the original file.<p/> Consumer: If provided, Camel will only consume files if a done "
+ "file exists. This option configures what file name to use. Either you can specify a fixed name. Or you "
+ "can use dynamic placeholders.The done file is always expected in the same folder as the original "
+ "file.<p/> Only ${file.name} and ${file.name.next} is supported as dynamic placeholders.")
protected String doneFileName;
@UriParam(label = "advanced", defaultValue = "100",
description = "Maximum number of messages to keep in memory available for browsing. Use 0 for unlimited.")
private int browseLimit = 100;
// producer options
@UriParam(label = "producer", description = "Flatten is used to flatten the file name path to strip any leading "
+ "paths, so it's just the file name. This allows you to consume recursively into sub-directories, but "
+ "when you eg write the files to another directory they will be written in a single directory. "
+ "Setting this to true on the producer enforces that any file name in CamelFileName header will be "
+ "stripped for any leading paths.")
protected boolean flatten;
@UriParam(label = "producer", defaultValue = "Override", description = "What to do if a file already exists with "
+ "the same name. Override, which is the default, replaces the existing file.<p/>"
+ " - Append - adds content to the existing file.<p/> "
+ " - Fail - throws a GenericFileOperationException, indicating that there is already an existing file.<p/> "
+ " - Ignore - silently ignores the problem and does not override the existing file, "
+ "but assumes everything is okay.<p/> "
+ " - Move - option requires to use the moveExisting option to be configured as well. The option "
+ "eagerDeleteTargetFile can be used to control what to do if an moving the file, and there "
+ "exists already an existing file, otherwise causing the move operation to fail. The Move option will move "
+ "any existing files, before writing the target file.<p/> "
+ " - TryRename is only applicable if tempFileName option is in use. This allows to try renaming the file "
+ "from the temporary name to the actual name, without doing any exists check. This check may be faster on "
+ "some file systems and especially FTP servers.")
protected GenericFileExist fileExist = GenericFileExist.Override;
@UriParam(label = "producer", description = "This option is used to write the file using a temporary name and "
+ "then, after the write is complete, rename it to the real name. Can be used to identify files being "
+ "written and also avoid consumers (not using exclusive read locks) reading in progress files. Is often "
+ "used by FTP when uploading big files.")
protected String tempPrefix;
@UriParam(label = "producer", javaType = "java.lang.String", description = "The same as tempPrefix option but "
+ "offering a more fine grained control on the naming of the temporary filename as it uses the File "
+ "Language. The location for tempFilename is relative to the final file location in the option "
+ "'fileName', not the target directory in the base uri. For example if option fileName includes a "
+ "directory prefix: dir/finalFilename then tempFileName is relative to that subdirectory dir.")
protected Expression tempFileName;
@UriParam(label = "producer,advanced", defaultValue = "true", description = "Whether or not to eagerly delete "
+ "any existing target file. This option only applies when you use fileExists=Override and the "
+ "tempFileName option as well. You can use this to disable (set it to false) deleting the target "
+ "file before the temp file is written. For example you may write big files and want the target file "
+ "to exists during the temp file is being written. This ensure the target file is only deleted until "
+ "the very last moment, just before the temp file is being renamed to the target filename. This option "
+ "is also used to control whether to delete any existing files when fileExist=Move is enabled, and an "
+ "existing file exists. If this option copyAndDeleteOnRenameFails false, then an exception will be thrown "
+ "if an existing file existed, if its true, then the existing file is deleted before the move operation.")
protected boolean eagerDeleteTargetFile = true;
@UriParam(label = "producer,advanced", description = "Will keep the last modified timestamp from the source file "
+ "(if any). Will use the FileConstants.FILE_LAST_MODIFIED header to located the timestamp. This header can "
+ "contain either a java.util.Date or long with the timestamp. If the timestamp exists and the option is "
+ "enabled it will set this timestamp on the written file. Note: This option only applies to the file "
+ "producer. You cannot use this option with any of the ftp producers.")
protected boolean keepLastModified;
@UriParam(label = "producer,advanced", description = "Used to specify if a null body is allowed during file "
+ "writing. If set to true then an empty file will be created, when set to false, and attempting to send"
+ " a null body to the file component, a GenericFileWriteException of 'Cannot write null body to file.' "
+ "will be thrown. If the `fileExist` option is set to 'Override', then the file will be truncated, and "
+ "if set to `append` the file will remain unchanged.")
protected boolean allowNullBody;
@UriParam(label = "producer", defaultValue = "true", description = "Used for jailing (restricting) writing files "
+ "to the starting directory (and sub) only. This is enabled by default to not allow Camel to write files "
+ "to outside directories (to be more secured out of the box). You can turn this off to allow writing "
+ "files to directories outside the starting directory, such as parent or root folders.")
protected boolean jailStartingDirectory = true;
@UriParam(label = "producer", description = "Used to append characters (text) after writing files. This can for "
+ "example be used to add new lines or other separators when writing and appending new files or existing files. <p/> "
+ "To specify new-line (slash-n or slash-r) or tab (slash-t) characters then escape with an extra slash, "
+ "eg slash-slash-n.")
protected String appendChars;
@UriParam(label = "producer",
enums = "MD2,MD5,SHA_1,SHA_224,SHA_256,SHA_384,SHA_512,SHA_512_224,SHA_512_256,SHA3_224,SHA3_256,SHA3_384,SHA3_512",
description = "If provided, then Camel will write a checksum file when the original file has been written. The checksum file"
+ " will contain the checksum created with the provided algorithm for the original file. The checksum file will"
+ " always be written in the same folder as the original file.")
protected String checksumFileAlgorithm;
// consumer options
@UriParam
protected GenericFileConfiguration configuration;
@UriParam(label = "consumer,advanced",
description = "A pluggable " + "org.apache.camel.component.file.GenericFileProcessStrategy "
+ "allowing you to implement your own readLock option or similar. Can also be used when special conditions "
+ "must be met before a file can be consumed, such as a special ready file exists. If this option is set "
+ "then the readLock option does not apply.")
protected GenericFileProcessStrategy<T> processStrategy;
@UriParam(label = "consumer,advanced", description = "A pluggable in-progress repository "
+ "org.apache.camel.spi.IdempotentRepository. The in-progress repository is used to account the current in "
+ "progress files being consumed. By default a memory based repository is used.")
protected IdempotentRepository inProgressRepository
= MemoryIdempotentRepository.memoryIdempotentRepository(DEFAULT_IN_PROGRESS_CACHE_SIZE);
@UriParam(label = "consumer,advanced", description = "When consuming, a local work directory can be used to "
+ "store the remote file content directly in local files, to avoid loading the content into memory. This "
+ "is beneficial, if you consume a very big remote file and thus can conserve memory.")
protected String localWorkDirectory;
@UriParam(label = "consumer", description = "If true, the file is not moved or deleted in any way. This option "
+ "is good for readonly data, or for ETL type requirements. If noop=true, Camel will set idempotent=true "
+ "as well, to avoid consuming the same files over and over again.")
protected boolean noop;
@UriParam(label = "consumer", description = "If a directory, will look for files in all the sub-directories as well.")
protected boolean recursive;
@UriParam(label = "consumer", description = "If true, the file will be deleted after it is processed successfully.")
protected boolean delete;
@UriParam(label = "consumer", description = "When pre-sort is enabled then the consumer will sort the file and "
+ "directory names during polling, that was retrieved from the file system. You may want to do this in "
+ "case you need to operate on the files in a sorted order. The pre-sort is executed before the consumer "
+ "starts to filter, and accept files to process by Camel. This option is default=false meaning disabled.")
protected boolean preSort;
@UriParam(label = "consumer,filter", description = "To define a maximum messages to gather per poll. By default "
+ "no maximum is set. Can be used to set a limit of e.g. 1000 to avoid when starting up the server that "
+ "there are thousands of files. Set a value of 0 or negative to disabled it. Notice: If this option is "
+ "in use then the File and FTP components will limit before any sorting. For example if you have 100000 "
+ "files and use maxMessagesPerPoll=500, then only the first 500 files will be picked up, and then sorted. "
+ "You can use the eagerMaxMessagesPerPoll option and set this to false to allow to scan all files first "
+ "and then sort afterwards.")
protected int maxMessagesPerPoll;
@UriParam(label = "consumer,filter", defaultValue = "true", description = "Allows for controlling whether the "
+ "limit from maxMessagesPerPoll is eager or not. If eager then the limit is during the scanning of files. "
+ "Where as false would scan all files, and then perform sorting. Setting this option to false allows for "
+ "sorting all files first, and then limit the poll. Mind that this requires a higher memory usage as all "
+ "file details are in memory to perform the sorting.")
protected boolean eagerMaxMessagesPerPoll = true;
@UriParam(label = "consumer,filter", defaultValue = "" + Integer.MAX_VALUE,
description = "The maximum depth to " + "traverse when recursively processing a directory.")
protected int maxDepth = Integer.MAX_VALUE;
@UriParam(label = "consumer,filter", description = "The minimum depth to start processing when recursively "
+ "processing a directory. Using minDepth=1 means the base directory. Using minDepth=2 means the first "
+ "sub directory.")
protected int minDepth;
@UriParam(label = "consumer,filter", description = "Is used to include files, if filename matches the regex "
+ "pattern (matching is case in-sensitive). <p/> Notice if you use symbols such as plus sign and others "
+ "you would need to configure this using the RAW() syntax if configuring this as an endpoint uri. See "
+ "more details at <a href=\"http://camel.apache.org/how-do-i-configure-endpoints.html\">configuring "
+ "endpoint uris</a>")
protected String include;
@UriParam(label = "consumer,filter", description = "Is used to exclude files, if filename matches the regex "
+ "pattern (matching is case in-sensitive). <p/> Notice if you use symbols such as plus sign and others "
+ "you would need to configure this using the RAW() syntax if configuring this as an endpoint uri. See "
+ "more details at <a href=\"http://camel.apache.org/how-do-i-configure-endpoints.html\">configuring "
+ "endpoint uris</a>")
protected String exclude;
@UriParam(label = "consumer,filter",
description = "Is used to include files matching file extension name (case insensitive). For example to include txt files, then use includeExt=txt."
+ " Multiple extensions can be separated by comma, for example to include txt and xml files, use includeExt=txt,xml."
+ " Note that the file extension includes all parts, for example having a file named mydata.tar.gz will have extension as tar.gz."
+ " For more flexibility then use the include/exclude options.")
protected String includeExt;
@UriParam(label = "consumer,filter",
description = "Is used to exclude files matching file extension name (case insensitive). For example to exclude bak files, then use excludeExt=bak."
+ " Multiple extensions can be separated by comma, for example to exclude bak and dat files, use excludeExt=bak,dat."
+ " Note that the file extension includes all parts, for example having a file named mydata.tar.gz will have extension as tar.gz."
+ " For more flexibility then use the include/exclude options.")
protected String excludeExt;
@UriParam(label = "consumer,filter", javaType = "java.lang.String", description = "Expression (such as Simple "
+ "Language) used to dynamically set the filename when moving it after processing. To move files into "
+ "a .done subdirectory just enter .done.")
protected Expression move;
@UriParam(label = "consumer", javaType = "java.lang.String", description = "Sets the move failure expression "
+ "based on Simple language. For example, to move files into a .error subdirectory use: .error. Note: "
+ "When moving the files to the fail location Camel will handle the error and will not pick up the "
+ "file again.")
protected Expression moveFailed;
@UriParam(label = "consumer", javaType = "java.lang.String", description = "Expression (such as File Language) "
+ "used to dynamically set the filename when moving it before processing. For example to move in-progress "
+ "files into the order directory set this value to order.")
protected Expression preMove;
@UriParam(label = "producer", javaType = "java.lang.String", description = "Expression (such as File Language) "
+ "used to compute file name to use when fileExist=Move is configured. To move files into a backup "
+ "subdirectory just enter backup. This option only supports the following File Language tokens: "
+ "\"file:name\", \"file:name.ext\", \"file:name.noext\", \"file:onlyname\", \"file:onlyname.noext\", "
+ "\"file:ext\", and \"file:parent\". Notice the \"file:parent\" is not supported by the FTP component, "
+ "as the FTP component can only move any existing files to a relative directory based on current dir "
+ "as base.")
protected Expression moveExisting;
@UriParam(label = "producer,advanced", description = "Strategy (Custom Strategy) used to move file with special "
+ "naming token to use when fileExist=Move is configured. By default, there is an implementation used if "
+ "no custom strategy is provided")
protected FileMoveExistingStrategy moveExistingFileStrategy;
@UriParam(label = "consumer,filter", defaultValue = "false", description = "Option to use the Idempotent "
+ "Consumer EIP pattern to let Camel skip already processed files. Will by default use a memory based "
+ "LRUCache that holds 1000 entries. If noop=true then idempotent will be enabled as well to avoid "
+ "consuming the same files over and over again.")
protected Boolean idempotent;
@UriParam(label = "consumer,filter", javaType = "java.lang.Boolean", defaultValue = "true",
description = "Sets whether to eagerly add the filename to the idempotent repository or wait until the exchange is complete.")
private Boolean idempotentEager = Boolean.TRUE;
@UriParam(label = "consumer,filter", javaType = "java.lang.String", description = "To use a custom idempotent "
+ "key. By default the absolute path of the file is used. You can use the File Language, for example to "
+ "use the file name and file size, you can do: idempotentKey=${file:name}-${file:size}")
protected Expression idempotentKey;
@UriParam(label = "consumer,filter", description = "A pluggable repository org.apache.camel.spi.IdempotentRepository "
+ "which by default use MemoryIdempotentRepository if none is specified and idempotent is true.")
protected IdempotentRepository idempotentRepository;
@UriParam(label = "consumer,filter",
description = "Pluggable filter as a org.apache.camel.component.file.GenericFileFilter "
+ "class. Will skip files if filter returns false in its accept() method.")
protected GenericFileFilter<T> filter;
@UriParam(label = "consumer,filter", javaType = "java.lang.String", description = "Filters the directory based on "
+ "Simple language. For example to filter on current date, you can use a simple date pattern such as "
+ "${date:now:yyyMMdd}")
protected Predicate filterDirectory;
@UriParam(label = "consumer,filter", javaType = "java.lang.String", description = "Filters the file based on "
+ "Simple language. For example to filter on file size, you can use ${file:size} > 5000")
protected Predicate filterFile;
@UriParam(label = "consumer,filter", defaultValue = "true", description = "Sets case sensitive flag on ant filter.")
protected boolean antFilterCaseSensitive = true;
protected volatile AntFilter antFilter;
@UriParam(label = "consumer,filter",
description = "Ant style filter inclusion. Multiple inclusions may be " + "specified in comma-delimited format.")
protected String antInclude;
@UriParam(label = "consumer,filter",
description = "Ant style filter exclusion. If both antInclude and antExclude are used, antExclude takes precedence over antInclude. Multiple exclusions may be specified in comma-delimited format.")
protected String antExclude;
@UriParam(label = "consumer,sort",
description = "Pluggable sorter as a "
+ "java.util.Comparator<org.apache.camel.component.file.GenericFile> class.")
protected Comparator<GenericFile<T>> sorter;
@UriParam(label = "consumer,sort", javaType = "java.lang.String", description = "Built-in sort by using the "
+ "File Language. Supports nested sorts, so you can have a sort by file name and as a 2nd group sort "
+ "by modified date.")
protected Comparator<Exchange> sortBy;
@UriParam(label = "consumer,sort", description = "To shuffle the list of files (sort in random order)")
protected boolean shuffle;
@UriParam(label = "consumer,lock", defaultValue = "none",
enums = "none,markerFile,fileLock,rename,changed,idempotent,idempotent-changed,idempotent-rename",
description = "Used by consumer, to only poll the files if it has exclusive read-lock on the file (i.e. "
+ "the file is not in-progress or being written). Camel will wait until the file lock is granted. "
+ "This option provides the build in strategies:<p/>"
+ " - none - No read lock is in use<p/>"
+ " - markerFile - Camel creates a marker file (fileName.camelLock) and then holds a lock on it. "
+ "This option is not available for the FTP component<p/>"
+ " - changed - Changed is using file length/modification timestamp to detect whether the file "
+ "is currently being copied or not. Will at least use 1 sec to determine this, so this option "
+ "cannot consume files as fast as the others, but can be more reliable as the JDK IO API "
+ "cannot always determine whether a file is currently being used by another process. The option "
+ "readLockCheckInterval can be used to set the check frequency.<p/>"
+ " - fileLock - is for using java.nio.channels.FileLock. This option is not avail for Windows OS "
+ "and the FTP component. This approach should be avoided when accessing a remote file system via "
+ "a mount/share unless that file system supports distributed file locks.<p/>"
+ " - rename - rename is for using a try to rename the file as a test if we can get exclusive "
+ "read-lock.<p/>"
+ " - idempotent - (only for file component) idempotent is for using a idempotentRepository "
+ "as the read-lock. This allows to use read locks that supports clustering if the idempotent "
+ "repository implementation supports that.<p/>"
+ " - idempotent-changed - (only for file component) idempotent-changed is for using a "
+ "idempotentRepository and changed as the combined read-lock. This allows to use read locks "
+ "that supports clustering if the idempotent repository implementation supports that.<p/>"
+ " - idempotent-rename - (only for file component) idempotent-rename is for using a "
+ "idempotentRepository and rename as the combined read-lock. This allows to use read locks "
+ "that supports clustering if the idempotent repository implementation supports that.<p/>"
+ "Notice: The various read locks is not all suited to work in clustered mode, where concurrent "
+ "consumers on different nodes is competing for the same files on a shared file system. The "
+ "markerFile using a close to atomic operation to create the empty marker file, but its not "
+ "guaranteed to work in a cluster. The fileLock may work better but then the file system need "
+ "to support distributed file locks, and so on. Using the idempotent read lock can support "
+ "clustering if the idempotent repository supports clustering, such as Hazelcast Component or "
+ "Infinispan.")
protected String readLock = "none";
@UriParam(label = "consumer,lock", defaultValue = "1000", description = "Interval in millis for the read-lock, "
+ "if supported by the read lock. This interval is used for sleeping between attempts to acquire the read "
+ "lock. For example when using the changed read lock, you can set a higher interval period to cater for "
+ "slow writes. The default of 1 sec. may be too fast if the producer is very slow writing the file. <p/>"
+ "Notice: For FTP the default readLockCheckInterval is 5000. <p/> The readLockTimeout value must be "
+ "higher than readLockCheckInterval, but a rule of thumb is to have a timeout that is at least 2 or more "
+ "times higher than the readLockCheckInterval. This is needed to ensure that ample time is allowed for "
+ "the read lock process to try to grab the lock before the timeout was hit.")
protected long readLockCheckInterval = 1000;
@UriParam(label = "consumer,lock", defaultValue = "10000", description = "Optional timeout in millis for the "
+ "read-lock, if supported by the read-lock. If the read-lock could not be granted and the timeout "
+ "triggered, then Camel will skip the file. At next poll Camel, will try the file again, and this time "
+ "maybe the read-lock could be granted. Use a value of 0 or lower to indicate forever. Currently "
+ "fileLock, changed and rename support the timeout. <p/> Notice: For FTP the default readLockTimeout "
+ "value is 20000 instead of 10000. <p/> The readLockTimeout value must be higher than "
+ "readLockCheckInterval, but a rule of thumb is to have a timeout that is at least 2 or more times "
+ "higher than the readLockCheckInterval. This is needed to ensure that ample time is allowed for the "
+ "read lock process to try to grab the lock before the timeout was hit.")
protected long readLockTimeout = 10000;
@UriParam(label = "consumer,lock", defaultValue = "true", description = "Whether to use marker file with the "
+ "changed, rename, or exclusive read lock types. By default a marker file is used as well to guard "
+ "against other processes picking up the same files. This behavior can be turned off by setting this "
+ "option to false. For example if you do not want to write marker files to the file systems by the "
+ "Camel application.")
protected boolean readLockMarkerFile = true;
@UriParam(label = "consumer,lock", defaultValue = "true", description = "Whether or not read lock with marker "
+ "files should upon startup delete any orphan read lock files, which may have been left on the file "
+ "system, if Camel was not properly shutdown (such as a JVM crash). <p/> If turning this option to "
+ "<tt>false</tt> then any orphaned lock file will cause Camel to not attempt to pickup that file, this "
+ "could also be due another node is concurrently reading files from the same shared directory.")
protected boolean readLockDeleteOrphanLockFiles = true;
@UriParam(label = "consumer,lock", defaultValue = "DEBUG", description = "Logging level used when a read lock "
+ "could not be acquired. By default a DEBUG is logged. You can change this level, for example to OFF to "
+ "not have any logging. This option is only applicable for readLock of types: changed, fileLock, "
+ "idempotent, idempotent-changed, idempotent-rename, rename.")
protected LoggingLevel readLockLoggingLevel = LoggingLevel.DEBUG;
@UriParam(label = "consumer,lock", defaultValue = "1", description = "This option is applied only for "
+ "readLock=changed. It allows you to configure a minimum file length. By default Camel expects the file "
+ "to contain data, and thus the default value is 1. You can set this option to zero, to allow consuming "
+ "zero-length files.")
protected long readLockMinLength = 1;
@UriParam(label = "consumer,lock", defaultValue = "0", description = "This option is applied only for "
+ "readLock=changed. It allows to specify a minimum age the file must be before attempting to acquire "
+ "the read lock. For example use readLockMinAge=300s to require the file is at last 5 minutes old. This "
+ "can speedup the changed read lock as it will only attempt to acquire files which are at least "
+ "that given age.")
protected long readLockMinAge;
@UriParam(label = "consumer,lock", defaultValue = "true", description = "This option is applied only for "
+ "readLock=idempotent. It allows to specify whether to remove the file name entry from the idempotent "
+ "repository when processing the file failed and a rollback happens. If this option is false, then the "
+ "file name entry is confirmed (as if the file did a commit).")
protected boolean readLockRemoveOnRollback = true;
@UriParam(label = "consumer,lock", description = "This option is applied only for readLock=idempotent. It allows "
+ "to specify whether to remove the file name entry from the idempotent repository when processing the "
+ "file is succeeded and a commit happens. <p/> By default the file is not removed which ensures that "
+ "any race-condition do not occur so another active node may attempt to grab the file. Instead the "
+ "idempotent repository may support eviction strategies that you can configure to evict the file name "
+ "entry after X minutes - this ensures no problems with race conditions. <p/> See more details at the "
+ "readLockIdempotentReleaseDelay option.")
protected boolean readLockRemoveOnCommit;
@UriParam(label = "consumer,lock", description = "Whether to delay the release task for a period of millis. <p/> "
+ "This can be used to delay the release tasks to expand the window when a file is regarded as "
+ "read-locked, in an active/active cluster scenario with a shared idempotent repository, to ensure "
+ "other nodes cannot potentially scan and acquire the same file, due to race-conditions. By expanding "
+ "the time-window of the release tasks helps prevents these situations. Note delaying is only needed "
+ "if you have configured readLockRemoveOnCommit to true.")
protected int readLockIdempotentReleaseDelay;
@UriParam(label = "consumer,lock", description = "Whether the delayed release task should be synchronous or "
+ "asynchronous. <p/> See more details at the readLockIdempotentReleaseDelay option.")
protected boolean readLockIdempotentReleaseAsync;
@UriParam(label = "consumer,lock", description = "The number of threads in the scheduled thread pool when using "
+ "asynchronous release tasks. Using a default of 1 core threads should be sufficient in almost all "
+ "use-cases, only set this to a higher value if either updating the idempotent repository is slow, or "
+ "there are a lot of files to process. This option is not in-use if you use a shared thread pool by "
+ "configuring the readLockIdempotentReleaseExecutorService option. <p/> See more details at the "
+ "readLockIdempotentReleaseDelay option.")
protected int readLockIdempotentReleaseAsyncPoolSize;
@UriParam(label = "consumer,lock", description = "To use a custom and shared thread pool for asynchronous "
+ "release tasks. <p/> See more details at the readLockIdempotentReleaseDelay option.")
protected ScheduledExecutorService readLockIdempotentReleaseExecutorService;
@UriParam(label = "consumer,lock",
description = "Pluggable read-lock as a org.apache.camel.component.file.GenericFileExclusiveReadLockStrategy implementation.")
protected GenericFileExclusiveReadLockStrategy<T> exclusiveReadLockStrategy;
@UriParam(label = "consumer,advanced",
description = "To use a custom "
+ "{@link org.apache.camel.spi.ExceptionHandler} to handle any thrown exceptions that happens during "
+ "the file on completion process where the consumer does either a commit or rollback. The default "
+ "implementation will log any exception at WARN level and ignore.")
protected ExceptionHandler onCompletionExceptionHandler;
@UriParam(defaultValue = "false", label = "advanced",
description = "Sets whether synchronous processing should be strictly used")
private boolean synchronous;
private Pattern includePattern;
private Pattern excludePattern;
/**
 * No-arg constructor for use by sub-classes that configure the endpoint URI and component later.
 */
protected GenericFileEndpoint() {
}
/**
 * Creates the endpoint with the given endpoint URI and owning component.
 *
 * @param endpointUri the endpoint URI this endpoint was created from
 * @param component   the component that created this endpoint
 */
protected GenericFileEndpoint(String endpointUri, Component component) {
    super(endpointUri, component);
}
/**
 * Creates a consumer that polls files from this endpoint.
 *
 * @param  processor the processor to invoke for each polled file
 * @return           the file consumer
 * @throws Exception if the consumer cannot be created
 */
@Override
public abstract GenericFileConsumer<T> createConsumer(Processor processor) throws Exception;

/**
 * Creates a producer that writes files to this endpoint.
 *
 * @return           the file producer
 * @throws Exception if the producer cannot be created
 */
@Override
public abstract GenericFileProducer<T> createProducer() throws Exception;

/**
 * Creates an {@link Exchange} carrying the given file.
 */
public abstract Exchange createExchange(GenericFile<T> file);

/**
 * The URI scheme of this endpoint (for example "file" or "ftp").
 */
public abstract String getScheme();

/**
 * The path separator character used by the file system this endpoint accesses.
 */
public abstract char getFileSeparator();

/**
 * Whether the given file name is an absolute path on this file system.
 */
public abstract boolean isAbsolute(String name);
/**
 * Whether consuming hidden files is enabled. Default is {@code false}; sub-classes may override to enable it.
 */
public boolean isHiddenFilesEnabled() {
    return false;
}
/**
 * Return the file name that will be auto-generated for the given message if none is provided.
 * <p/>
 * The name is derived from the message id, sanitized so it is safe to use as a file name.
 */
public String getGeneratedFileName(Message message) {
    return StringHelper.sanitize(message.getMessageId());
}
/**
 * Browses the endpoint using the configured {@code browseLimit} and no filter.
 * <p/>
 * NOTE(review): the previous javadoc stated file content is <b>not</b> loaded, but the delegate does attempt
 * to retrieve and load file content (best-effort, errors ignored) when not browsing in status mode — confirm
 * which behavior is intended. No file locking is in use by this implementation.
 */
@Override
public List<Exchange> getExchanges() {
    return getExchanges(browseLimit, null);
}
/**
 * Browses the endpoint, returning exchanges that match the given filter.
 *
 * @param limit  requested maximum number of exchanges (NOTE(review): the private delegate currently caps on
 *               the {@code browseLimit} field instead of this argument — confirm intent)
 * @param filter optional predicate applied to each exchange; {@code null} means accept all
 */
@Override
public List<Exchange> getExchanges(int limit, java.util.function.Predicate filter) {
    return getExchanges(limit, filter, false);
}
/**
 * Computes a browse status for this endpoint: the number of browsable exchanges and the message
 * timestamps of the first and last browsed exchange (0 when no exchanges were found).
 */
@Override
public BrowseStatus getBrowseStatus(int limit) {
    // browse in status-only mode so file content is not downloaded
    List<Exchange> browsed = getExchanges(limit, null, true);

    long firstTimestamp = 0;
    long lastTimestamp = 0;
    if (!browsed.isEmpty()) {
        Exchange first = browsed.get(0);
        Exchange last = browsed.get(browsed.size() - 1);
        firstTimestamp = first.getMessage().getHeader(Exchange.MESSAGE_TIMESTAMP, 0L, long.class);
        lastTimestamp = last.getMessage().getHeader(Exchange.MESSAGE_TIMESTAMP, 0L, long.class);
    }
    return new BrowseStatus(browsed.size(), firstTimestamp, lastTimestamp);
}
/**
 * Browses the endpoint by creating a one-shot consumer (without a processor) and polling it manually.
 * <p/>
 * Each polled exchange is passed through the optional {@code filter}; accepted exchanges are collected up
 * to {@code limit}. When {@code status} is {@code true}, only file status is gathered and file content is
 * not downloaded; otherwise content retrieval is attempted on a best-effort basis (retrieval errors are
 * logged at DEBUG and ignored).
 *
 * @param limit  maximum number of exchanges to collect
 * @param filter optional predicate applied to each exchange; {@code null} means accept all
 * @param status whether to browse in status-only mode (skip downloading file content)
 */
private List<Exchange> getExchanges(int limit, java.util.function.Predicate filter, boolean status) {
    final List<Exchange> answer = new ArrayList<>();

    GenericFileConsumer<?> consumer = null;
    try {
        // create a new consumer which can poll the exchanges we want to browse
        // do not provide a processor as we do some custom processing
        consumer = createConsumer(null);
        if (filter == null) {
            // fix: honor the requested limit (previously the browseLimit field was used here,
            // which silently ignored the limit argument supplied by the callers)
            consumer.setMaxMessagesPerPoll(limit);
        }
        if (status) {
            // optimize to not download files as we only want status
            consumer.setRetrieveFile(false);
        }
        final GenericFileConsumer gfc = consumer;
        consumer.setCustomProcessor(new Processor() {
            @Override
            public void process(Exchange exchange) throws Exception {
                boolean include = true;
                if (filter != null) {
                    include = filter.test(exchange);
                }
                // fix: cap collection on the requested limit instead of the browseLimit field
                if (include && answer.size() < limit) {
                    if (!status) {
                        // ensure payload is downloaded (when not in status mode)
                        GenericFile<?> gf = exchange.getMessage().getBody(GenericFile.class);
                        if (gf != null) {
                            final String name = gf.getAbsoluteFilePath();
                            try {
                                boolean downloaded = gfc.tryRetrievingFile(exchange, name, gf, name, gf);
                                if (downloaded) {
                                    gf.getBinding().loadContent(exchange, gf);
                                    Object data = gf.getBody();
                                    if (data != null) {
                                        exchange.getMessage().setBody(data);
                                    }
                                }
                            } catch (Exception e) {
                                // best-effort retrieval: browsing should not fail because one file could not be read
                                LOG.debug("Error trying to retrieve file: {} due to: {}. This exception is ignored.", name,
                                        e.getMessage(), e);
                            }
                        }
                    }
                    answer.add(exchange);
                }
            }
        });

        // do not start scheduler, as we invoke the poll manually
        consumer.setStartScheduler(false);
        // start consumer
        ServiceHelper.startService(consumer);
        // invoke poll which performs the custom processing, so we can browse the exchanges
        consumer.poll();
    } catch (Exception e) {
        throw RuntimeCamelException.wrapRuntimeCamelException(e);
    } finally {
        // always stop the throw-away consumer; failures to stop must not mask the browse result
        try {
            ServiceHelper.stopService(consumer);
        } catch (Exception e) {
            LOG.debug("Error stopping consumer used for browsing exchanges. This exception will be ignored", e);
        }
    }
    return answer;
}
/**
 * A strategy method to lazily create the file strategy.
 *
 * @return the process strategy used to begin/commit/rollback processing of files
 */
protected abstract GenericFileProcessStrategy<T> createGenericFileStrategy();
public boolean isNoop() {
return noop;
}
/**
* If true, the file is not moved or deleted in any way. This option is good for readonly data, or for ETL type
* requirements. If noop=true, Camel will set idempotent=true as well, to avoid consuming the same files over and
* over again.
*/
public void setNoop(boolean noop) {
this.noop = noop;
}
public boolean isRecursive() {
return recursive;
}
/**
* If a directory, will look for files in all the sub-directories as well.
*/
public void setRecursive(boolean recursive) {
this.recursive = recursive;
}
public String getInclude() {
return include;
}
/**
     * Is used to include files, if filename matches the regex pattern (matching is case-insensitive).
* <p/>
* Notice if you use symbols such as plus sign and others you would need to configure this using the RAW() syntax if
* configuring this as an endpoint uri. See more details at
* <a href= "http://camel.apache.org/how-do-i-configure-endpoints.html">configuring endpoint uris</a>
*/
public void setInclude(String include) {
this.include = include;
this.includePattern = Pattern.compile(include, Pattern.CASE_INSENSITIVE);
}
public Pattern getIncludePattern() {
return includePattern;
}
public String getExclude() {
return exclude;
}
/**
     * Is used to exclude files, if filename matches the regex pattern (matching is case-insensitive).
* <p/>
* Notice if you use symbols such as plus sign and others you would need to configure this using the RAW() syntax if
* configuring this as an endpoint uri. See more details at
* <a href= "http://camel.apache.org/how-do-i-configure-endpoints.html">configuring endpoint uris</a>
*/
public void setExclude(String exclude) {
this.exclude = exclude;
this.excludePattern = Pattern.compile(exclude, Pattern.CASE_INSENSITIVE);
}
public Pattern getExcludePattern() {
return this.excludePattern;
}
public String getAntInclude() {
return antInclude;
}
/**
* Ant style filter inclusion. Multiple inclusions may be specified in comma-delimited format.
*/
public void setAntInclude(String antInclude) {
this.antInclude = antInclude;
}
public String getAntExclude() {
return antExclude;
}
/**
* Ant style filter exclusion. If both antInclude and antExclude are used, antExclude takes precedence over
* antInclude. Multiple exclusions may be specified in comma-delimited format.
*/
public void setAntExclude(String antExclude) {
this.antExclude = antExclude;
}
public boolean isAntFilterCaseSensitive() {
return antFilterCaseSensitive;
}
/**
* Sets case sensitive flag on ant filter
*/
public void setAntFilterCaseSensitive(boolean antFilterCaseSensitive) {
this.antFilterCaseSensitive = antFilterCaseSensitive;
}
public AntFilter getAntFilter() {
return antFilter;
}
public String getIncludeExt() {
return includeExt;
}
/**
* Is used to include files matching file extension name (case insensitive). For example to include txt files, then
* use includeExt=txt. Multiple extensions can be separated by comma, for example to include txt and xml files, use
* includeExt=txt,xml
*/
public void setIncludeExt(String includeExt) {
this.includeExt = includeExt;
}
public String getExcludeExt() {
return excludeExt;
}
/**
* Is used to exclude files matching file extension name (case insensitive). For example to exclude bak files, then
* use excludeExt=bak. Multiple extensions can be separated by comma, for example to exclude bak and dat files, use
* excludeExt=bak,dat.
*/
public void setExcludeExt(String excludeExt) {
this.excludeExt = excludeExt;
}
public boolean isPreSort() {
return preSort;
}
/**
* When pre-sort is enabled then the consumer will sort the file and directory names during polling, that was
* retrieved from the file system. You may want to do this in case you need to operate on the files in a sorted
* order. The pre-sort is executed before the consumer starts to filter, and accept files to process by Camel. This
* option is default=false meaning disabled.
*/
public void setPreSort(boolean preSort) {
this.preSort = preSort;
}
public boolean isDelete() {
return delete;
}
/**
* If true, the file will be deleted after it is processed successfully.
*/
public void setDelete(boolean delete) {
this.delete = delete;
}
public boolean isFlatten() {
return flatten;
}
/**
* Flatten is used to flatten the file name path to strip any leading paths, so it's just the file name. This allows
* you to consume recursively into sub-directories, but when you eg write the files to another directory they will
* be written in a single directory. Setting this to true on the producer enforces that any file name in
* CamelFileName header will be stripped for any leading paths.
*/
public void setFlatten(boolean flatten) {
this.flatten = flatten;
}
public Expression getMove() {
return move;
}
/**
* Expression (such as Simple Language) used to dynamically set the filename when moving it after processing. To
* move files into a .done subdirectory just enter .done.
*/
public void setMove(Expression move) {
this.move = move;
}
/**
* @see #setMove(org.apache.camel.Expression)
*/
public void setMove(String fileLanguageExpression) {
String expression = configureMoveOrPreMoveExpression(fileLanguageExpression);
this.move = createFileLanguageExpression(expression);
}
public Expression getMoveFailed() {
return moveFailed;
}
/**
* Sets the move failure expression based on Simple language. For example, to move files into a .error subdirectory
* use: .error. Note: When moving the files to the fail location Camel will handle the error and will not pick up
* the file again.
*/
public void setMoveFailed(Expression moveFailed) {
this.moveFailed = moveFailed;
}
public void setMoveFailed(String fileLanguageExpression) {
String expression = configureMoveOrPreMoveExpression(fileLanguageExpression);
this.moveFailed = createFileLanguageExpression(expression);
}
public Predicate getFilterDirectory() {
return filterDirectory;
}
/**
* Filters the directory based on Simple language. For example to filter on current date, you can use a simple date
     * pattern such as ${date:now:yyyyMMdd}
*/
public void setFilterDirectory(Predicate filterDirectory) {
this.filterDirectory = filterDirectory;
}
/**
* Filters the directory based on Simple language. For example to filter on current date, you can use a simple date
     * pattern such as ${date:now:yyyyMMdd}
*
* @see #setFilterDirectory(Predicate)
*/
public void setFilterDirectory(String expression) {
this.filterDirectory = createFileLanguagePredicate(expression);
}
public Predicate getFilterFile() {
return filterFile;
}
/**
* Filters the file based on Simple language. For example to filter on file size, you can use ${file:size} > 5000
*/
public void setFilterFile(Predicate filterFile) {
this.filterFile = filterFile;
}
/**
* Filters the file based on Simple language. For example to filter on file size, you can use ${file:size} > 5000
*
* @see #setFilterFile(Predicate)
*/
public void setFilterFile(String expression) {
this.filterFile = createFileLanguagePredicate(expression);
}
public Expression getPreMove() {
return preMove;
}
/**
* Expression (such as File Language) used to dynamically set the filename when moving it before processing. For
* example to move in-progress files into the order directory set this value to order.
*/
public void setPreMove(Expression preMove) {
this.preMove = preMove;
}
public void setPreMove(String fileLanguageExpression) {
String expression = configureMoveOrPreMoveExpression(fileLanguageExpression);
this.preMove = createFileLanguageExpression(expression);
}
public Expression getMoveExisting() {
return moveExisting;
}
/**
* Expression (such as File Language) used to compute file name to use when fileExist=Move is configured. To move
* files into a backup subdirectory just enter backup. This option only supports the following File Language tokens:
* "file:name", "file:name.ext", "file:name.noext", "file:onlyname", "file:onlyname.noext", "file:ext", and
* "file:parent". Notice the "file:parent" is not supported by the FTP component, as the FTP component can only move
* any existing files to a relative directory based on current dir as base.
*/
public void setMoveExisting(Expression moveExisting) {
this.moveExisting = moveExisting;
}
public FileMoveExistingStrategy getMoveExistingFileStrategy() {
return moveExistingFileStrategy;
}
/**
* Strategy (Custom Strategy) used to move file with special naming token to use when fileExist=Move is configured.
* By default, there is an implementation used if no custom strategy is provided
*/
public void setMoveExistingFileStrategy(FileMoveExistingStrategy moveExistingFileStrategy) {
this.moveExistingFileStrategy = moveExistingFileStrategy;
}
public void setMoveExisting(String fileLanguageExpression) {
String expression = configureMoveOrPreMoveExpression(fileLanguageExpression);
this.moveExisting = createFileLanguageExpression(expression);
}
public Expression getFileName() {
return fileName;
}
/**
* Use Expression such as File Language to dynamically set the filename. For consumers, it's used as a filename
* filter. For producers, it's used to evaluate the filename to write. If an expression is set, it take precedence
* over the CamelFileName header. (Note: The header itself can also be an Expression). The expression options
* support both String and Expression types. If the expression is a String type, it is always evaluated using the
* File Language. If the expression is an Expression type, the specified Expression type is used - this allows you,
* for instance, to use OGNL expressions. For the consumer, you can use it to filter filenames, so you can for
* instance consume today's file using the File Language syntax: mydata-${date:now:yyyyMMdd}.txt. The producers
* support the CamelOverruleFileName header which takes precedence over any existing CamelFileName header; the
* CamelOverruleFileName is a header that is used only once, and makes it easier as this avoids to temporary store
* CamelFileName and have to restore it afterwards.
*/
public void setFileName(Expression fileName) {
this.fileName = fileName;
}
public void setFileName(String fileLanguageExpression) {
this.fileName = createFileLanguageExpression(fileLanguageExpression);
}
public String getDoneFileName() {
return doneFileName;
}
/**
* Producer: If provided, then Camel will write a 2nd done file when the original file has been written. The done
* file will be empty. This option configures what file name to use. Either you can specify a fixed name. Or you can
* use dynamic placeholders. The done file will always be written in the same folder as the original file.
* <p/>
* Consumer: If provided, Camel will only consume files if a done file exists. This option configures what file name
* to use. Either you can specify a fixed name. Or you can use dynamic placeholders.The done file is always expected
* in the same folder as the original file.
* <p/>
* Only ${file.name} and ${file.name.noext} is supported as dynamic placeholders.
*/
public void setDoneFileName(String doneFileName) {
this.doneFileName = doneFileName;
}
@Override
public int getBrowseLimit() {
return browseLimit;
}
@Override
public void setBrowseLimit(int browseLimit) {
this.browseLimit = browseLimit;
}
/**
 * Whether idempotent consuming is enabled; an unset (null) flag is treated as disabled.
 */
public Boolean isIdempotent() {
    return Boolean.TRUE.equals(idempotent);
}
/**
 * Whether eager idempotent mode is enabled; an unset (null) flag is treated as disabled.
 */
public boolean isIdempotentEager() {
    return Boolean.TRUE.equals(idempotentEager);
}
public String getCharset() {
return charset;
}
/**
* This option is used to specify the encoding of the file. You can use this on the consumer, to specify the
* encodings of the files, which allow Camel to know the charset it should load the file content in case the file
* content is being accessed. Likewise when writing a file, you can use this option to specify which charset to
* write the file as well. Do mind that when writing the file Camel may have to read the message content into memory
* to be able to convert the data into the configured charset, so do not use this if you have big messages.
*/
public void setCharset(String charset) {
IOHelper.validateCharset(charset);
this.charset = charset;
}
protected boolean isIdempotentSet() {
return idempotent != null;
}
public Boolean getIdempotent() {
return idempotent;
}
/**
* Option to use the Idempotent Consumer EIP pattern to let Camel skip already processed files. Will by default use
* a memory based LRUCache that holds 1000 entries. If noop=true then idempotent will be enabled as well to avoid
* consuming the same files over and over again.
*/
public void setIdempotent(Boolean idempotent) {
this.idempotent = idempotent;
}
public Boolean getIdempotentEager() {
return idempotentEager;
}
/**
* Sets whether to eagerly add the key to the idempotent repository or wait until the exchange is complete.
*/
public void setIdempotentEager(Boolean idempotentEager) {
this.idempotentEager = idempotentEager;
}
public Expression getIdempotentKey() {
return idempotentKey;
}
/**
* To use a custom idempotent key. By default the absolute path of the file is used. You can use the File Language,
* for example to use the file name and file size, you can do: idempotentKey=${file:name}-${file:size}
*/
public void setIdempotentKey(Expression idempotentKey) {
this.idempotentKey = idempotentKey;
}
public void setIdempotentKey(String expression) {
this.idempotentKey = createFileLanguageExpression(expression);
}
public IdempotentRepository getIdempotentRepository() {
return idempotentRepository;
}
/**
* A pluggable repository org.apache.camel.spi.IdempotentRepository which by default use MemoryIdempotentRepository
* if none is specified and idempotent is true.
*/
public void setIdempotentRepository(IdempotentRepository idempotentRepository) {
this.idempotentRepository = idempotentRepository;
}
public GenericFileFilter<T> getFilter() {
return filter;
}
/**
* Pluggable filter as a org.apache.camel.component.file.GenericFileFilter class. Will skip files if filter returns
* false in its accept() method.
*/
public void setFilter(GenericFileFilter<T> filter) {
this.filter = filter;
}
public Comparator<GenericFile<T>> getSorter() {
return sorter;
}
/**
* Pluggable sorter as a java.util.Comparator<org.apache.camel.component.file.GenericFile> class.
*/
public void setSorter(Comparator<GenericFile<T>> sorter) {
this.sorter = sorter;
}
public Comparator<Exchange> getSortBy() {
return sortBy;
}
/**
* Built-in sort by using the File Language. Supports nested sorts, so you can have a sort by file name and as a 2nd
* group sort by modified date.
*/
public void setSortBy(Comparator<Exchange> sortBy) {
this.sortBy = sortBy;
}
public void setSortBy(String expression) {
setSortBy(expression, false);
}
public void setSortBy(String expression, boolean reverse) {
setSortBy(GenericFileDefaultSorter.sortByFileLanguage(getCamelContext(), expression, reverse));
}
public boolean isShuffle() {
return shuffle;
}
/**
* To shuffle the list of files (sort in random order)
*/
public void setShuffle(boolean shuffle) {
this.shuffle = shuffle;
}
public String getTempPrefix() {
    return tempPrefix;
}

/**
 * This option is used to write the file using a temporary name and then, after the write is complete, rename it to
 * the real name. Can be used to identify files being written and also avoid consumers (not using exclusive read
 * locks) reading in progress files. Is often used by FTP when uploading big files.
 * <p/>
 * Note: setting the prefix also derives the tempFileName expression as tempPrefix + ${file:onlyname}.
 */
public void setTempPrefix(String tempPrefix) {
    this.tempPrefix = tempPrefix;
    // prepend the prefix to only the file name (no leading path)
    setTempFileName(tempPrefix + "${file:onlyname}");
}
public Expression getTempFileName() {
return tempFileName;
}
/**
* The same as tempPrefix option but offering a more fine grained control on the naming of the temporary filename as
* it uses the File Language. The location for tempFilename is relative to the final file location in the option
* 'fileName', not the target directory in the base uri. For example if option fileName includes a directory prefix:
* dir/finalFilename then tempFileName is relative to that subdirectory dir.
*/
public void setTempFileName(Expression tempFileName) {
this.tempFileName = tempFileName;
}
public void setTempFileName(String tempFileNameExpression) {
this.tempFileName = createFileLanguageExpression(tempFileNameExpression);
}
public boolean isEagerDeleteTargetFile() {
return eagerDeleteTargetFile;
}
/**
* Whether or not to eagerly delete any existing target file. This option only applies when you use
* fileExists=Override and the tempFileName option as well. You can use this to disable (set it to false) deleting
* the target file before the temp file is written. For example you may write big files and want the target file to
* exists during the temp file is being written. This ensure the target file is only deleted until the very last
* moment, just before the temp file is being renamed to the target filename. This option is also used to control
     * whether to delete any existing files when fileExist=Move is enabled and an existing file exists. If this option
     * is false, then an exception will be thrown if an existing file exists; if it is true, then
     * the existing file is deleted before the move operation.
*/
public void setEagerDeleteTargetFile(boolean eagerDeleteTargetFile) {
this.eagerDeleteTargetFile = eagerDeleteTargetFile;
}
public GenericFileConfiguration getConfiguration() {
if (configuration == null) {
configuration = new GenericFileConfiguration();
}
return configuration;
}
public void setConfiguration(GenericFileConfiguration configuration) {
this.configuration = configuration;
}
public GenericFileExclusiveReadLockStrategy<T> getExclusiveReadLockStrategy() {
return exclusiveReadLockStrategy;
}
/**
* Pluggable read-lock as a org.apache.camel.component.file.GenericFileExclusiveReadLockStrategy implementation.
*/
public void setExclusiveReadLockStrategy(GenericFileExclusiveReadLockStrategy<T> exclusiveReadLockStrategy) {
this.exclusiveReadLockStrategy = exclusiveReadLockStrategy;
}
public String getReadLock() {
return readLock;
}
/**
* Used by consumer, to only poll the files if it has exclusive read-lock on the file (i.e. the file is not
* in-progress or being written). Camel will wait until the file lock is granted. This option provides the build in
* strategies:\n\n - none - No read lock is in use\n - markerFile - Camel creates a marker file (fileName.camelLock)
* and then holds a lock on it. This option is not available for the FTP component\n - changed - Changed is using
* file length/modification timestamp to detect whether the file is currently being copied or not. Will at least use
* 1 sec to determine this, so this option cannot consume files as fast as the others, but can be more reliable as
* the JDK IO API cannot always determine whether a file is currently being used by another process. The option
* readLockCheckInterval can be used to set the check frequency.\n - fileLock - is for using
* java.nio.channels.FileLock. This option is not avail for Windows OS and the FTP component. This approach should
* be avoided when accessing a remote file system via a mount/share unless that file system supports distributed
* file locks.\n - rename - rename is for using a try to rename the file as a test if we can get exclusive
* read-lock.\n - idempotent - (only for file component) idempotent is for using a idempotentRepository as the
* read-lock. This allows to use read locks that supports clustering if the idempotent repository implementation
* supports that.\n - idempotent-changed - (only for file component) idempotent-changed is for using a
* idempotentRepository and changed as the combined read-lock. This allows to use read locks that supports
* clustering if the idempotent repository implementation supports that.\n - idempotent-rename - (only for file
* component) idempotent-rename is for using a idempotentRepository and rename as the combined read-lock. This
* allows to use read locks that supports clustering if the idempotent repository implementation supports that.\n
* \nNotice: The various read locks is not all suited to work in clustered mode, where concurrent consumers on
* different nodes is competing for the same files on a shared file system. The markerFile using a close to atomic
* operation to create the empty marker file, but its not guaranteed to work in a cluster. The fileLock may work
* better but then the file system need to support distributed file locks, and so on. Using the idempotent read lock
* can support clustering if the idempotent repository supports clustering, such as Hazelcast Component or
* Infinispan.
*/
public void setReadLock(String readLock) {
this.readLock = readLock;
}
public long getReadLockCheckInterval() {
return readLockCheckInterval;
}
/**
* Interval in millis for the read-lock, if supported by the read lock. This interval is used for sleeping between
* attempts to acquire the read lock. For example when using the changed read lock, you can set a higher interval
* period to cater for slow writes. The default of 1 sec. may be too fast if the producer is very slow writing the
* file.
* <p/>
* Notice: For FTP the default readLockCheckInterval is 5000.
* <p/>
* The readLockTimeout value must be higher than readLockCheckInterval, but a rule of thumb is to have a timeout
* that is at least 2 or more times higher than the readLockCheckInterval. This is needed to ensure that ample time
* is allowed for the read lock process to try to grab the lock before the timeout was hit.
*/
public void setReadLockCheckInterval(long readLockCheckInterval) {
this.readLockCheckInterval = readLockCheckInterval;
}
public long getReadLockTimeout() {
return readLockTimeout;
}
/**
* Optional timeout in millis for the read-lock, if supported by the read-lock. If the read-lock could not be
* granted and the timeout triggered, then Camel will skip the file. At next poll Camel, will try the file again,
* and this time maybe the read-lock could be granted. Use a value of 0 or lower to indicate forever. Currently
* fileLock, changed and rename support the timeout.
* <p/>
* Notice: For FTP the default readLockTimeout value is 20000 instead of 10000.
* <p/>
* The readLockTimeout value must be higher than readLockCheckInterval, but a rule of thumb is to have a timeout
* that is at least 2 or more times higher than the readLockCheckInterval. This is needed to ensure that ample time
* is allowed for the read lock process to try to grab the lock before the timeout was hit.
*/
public void setReadLockTimeout(long readLockTimeout) {
this.readLockTimeout = readLockTimeout;
}
public boolean isReadLockMarkerFile() {
return readLockMarkerFile;
}
/**
* Whether to use marker file with the changed, rename, or exclusive read lock types. By default a marker file is
* used as well to guard against other processes picking up the same files. This behavior can be turned off by
* setting this option to false. For example if you do not want to write marker files to the file systems by the
* Camel application.
*/
public void setReadLockMarkerFile(boolean readLockMarkerFile) {
this.readLockMarkerFile = readLockMarkerFile;
}
public boolean isReadLockDeleteOrphanLockFiles() {
return readLockDeleteOrphanLockFiles;
}
/**
* Whether or not read lock with marker files should upon startup delete any orphan read lock files, which may have
* been left on the file system, if Camel was not properly shutdown (such as a JVM crash).
* <p/>
* If turning this option to <tt>false</tt> then any orphaned lock file will cause Camel to not attempt to pickup
* that file, this could also be due another node is concurrently reading files from the same shared directory.
*/
public void setReadLockDeleteOrphanLockFiles(boolean readLockDeleteOrphanLockFiles) {
this.readLockDeleteOrphanLockFiles = readLockDeleteOrphanLockFiles;
}
public LoggingLevel getReadLockLoggingLevel() {
return readLockLoggingLevel;
}
/**
* Logging level used when a read lock could not be acquired. By default a DEBUG is logged. You can change this
* level, for example to OFF to not have any logging. This option is only applicable for readLock of types: changed,
* fileLock, idempotent, idempotent-changed, idempotent-rename, rename.
*/
public void setReadLockLoggingLevel(LoggingLevel readLockLoggingLevel) {
this.readLockLoggingLevel = readLockLoggingLevel;
}
public long getReadLockMinLength() {
return readLockMinLength;
}
/**
* This option is applied only for readLock=changed. It allows you to configure a minimum file length. By default
* Camel expects the file to contain data, and thus the default value is 1. You can set this option to zero, to
* allow consuming zero-length files.
*/
public void setReadLockMinLength(long readLockMinLength) {
this.readLockMinLength = readLockMinLength;
}
public long getReadLockMinAge() {
return readLockMinAge;
}
/**
* This option is applied only for readLock=changed. It allows to specify a minimum age the file must be before
* attempting to acquire the read lock. For example use readLockMinAge=300s to require the file is at last 5 minutes
* old. This can speedup the changed read lock as it will only attempt to acquire files which are at least that
* given age.
*/
public void setReadLockMinAge(long readLockMinAge) {
this.readLockMinAge = readLockMinAge;
}
public boolean isReadLockRemoveOnRollback() {
return readLockRemoveOnRollback;
}
/**
* This option is applied only for readLock=idempotent. It allows to specify whether to remove the file name entry
* from the idempotent repository when processing the file failed and a rollback happens. If this option is false,
* then the file name entry is confirmed (as if the file did a commit).
*/
public void setReadLockRemoveOnRollback(boolean readLockRemoveOnRollback) {
this.readLockRemoveOnRollback = readLockRemoveOnRollback;
}
public boolean isReadLockRemoveOnCommit() {
return readLockRemoveOnCommit;
}
/**
* This option is applied only for readLock=idempotent. It allows to specify whether to remove the file name entry
* from the idempotent repository when processing the file is succeeded and a commit happens.
* <p/>
* By default the file is not removed which ensures that any race-condition do not occur so another active node may
* attempt to grab the file. Instead the idempotent repository may support eviction strategies that you can
* configure to evict the file name entry after X minutes - this ensures no problems with race conditions.
* <p/>
* See more details at the readLockIdempotentReleaseDelay option.
*/
public void setReadLockRemoveOnCommit(boolean readLockRemoveOnCommit) {
this.readLockRemoveOnCommit = readLockRemoveOnCommit;
}
public int getReadLockIdempotentReleaseDelay() {
return readLockIdempotentReleaseDelay;
}
/**
* Whether to delay the release task for a period of millis.
* <p/>
* This can be used to delay the release tasks to expand the window when a file is regarded as read-locked, in an
* active/active cluster scenario with a shared idempotent repository, to ensure other nodes cannot potentially scan
* and acquire the same file, due to race-conditions. By expanding the time-window of the release tasks helps
* prevents these situations. Note delaying is only needed if you have configured readLockRemoveOnCommit to true.
*/
public void setReadLockIdempotentReleaseDelay(int readLockIdempotentReleaseDelay) {
this.readLockIdempotentReleaseDelay = readLockIdempotentReleaseDelay;
}
public boolean isReadLockIdempotentReleaseAsync() {
return readLockIdempotentReleaseAsync;
}
/**
* Whether the delayed release task should be synchronous or asynchronous.
* <p/>
* See more details at the readLockIdempotentReleaseDelay option.
*/
public void setReadLockIdempotentReleaseAsync(boolean readLockIdempotentReleaseAsync) {
this.readLockIdempotentReleaseAsync = readLockIdempotentReleaseAsync;
}
public int getReadLockIdempotentReleaseAsyncPoolSize() {
return readLockIdempotentReleaseAsyncPoolSize;
}
/**
* The number of threads in the scheduled thread pool when using asynchronous release tasks. Using a default of 1
* core threads should be sufficient in almost all use-cases, only set this to a higher value if either updating the
* idempotent repository is slow, or there are a lot of files to process. This option is not in-use if you use a
* shared thread pool by configuring the readLockIdempotentReleaseExecutorService option.
* <p/>
* See more details at the readLockIdempotentReleaseDelay option.
*/
public void setReadLockIdempotentReleaseAsyncPoolSize(int readLockIdempotentReleaseAsyncPoolSize) {
this.readLockIdempotentReleaseAsyncPoolSize = readLockIdempotentReleaseAsyncPoolSize;
}
public ScheduledExecutorService getReadLockIdempotentReleaseExecutorService() {
return readLockIdempotentReleaseExecutorService;
}
/**
* To use a custom and shared thread pool for asynchronous release tasks.
* <p/>
* See more details at the readLockIdempotentReleaseDelay option.
*/
public void setReadLockIdempotentReleaseExecutorService(ScheduledExecutorService readLockIdempotentReleaseExecutorService) {
this.readLockIdempotentReleaseExecutorService = readLockIdempotentReleaseExecutorService;
}
public int getBufferSize() {
return bufferSize;
}
/**
* Buffer size in bytes used for writing files (or in case of FTP for downloading and uploading files).
*/
public void setBufferSize(int bufferSize) {
if (bufferSize <= 0) {
throw new IllegalArgumentException("BufferSize must be a positive value, was " + bufferSize);
}
this.bufferSize = bufferSize;
}
public GenericFileExist getFileExist() {
return fileExist;
}
/**
* What to do if a file already exists with the same name. Override, which is the default, replaces the existing
* file. \n\n - Append - adds content to the existing file.\n - Fail - throws a GenericFileOperationException,
* indicating that there is already an existing file.\n - Ignore - silently ignores the problem and does not
* override the existing file, but assumes everything is okay.\n - Move - option requires to use the moveExisting
* option to be configured as well. The option eagerDeleteTargetFile can be used to control what to do if an moving
* the file, and there exists already an existing file, otherwise causing the move operation to fail. The Move
* option will move any existing files, before writing the target file.\n - TryRename is only applicable if
* tempFileName option is in use. This allows to try renaming the file from the temporary name to the actual name,
* without doing any exists check. This check may be faster on some file systems and especially FTP servers.
*/
public void setFileExist(GenericFileExist fileExist) {
this.fileExist = fileExist;
}
public boolean isAutoCreate() {
return autoCreate;
}
/**
* Automatically create missing directories in the file's pathname. For the file consumer, that means creating the
* starting directory. For the file producer, it means the directory the files should be written to.
*/
public void setAutoCreate(boolean autoCreate) {
this.autoCreate = autoCreate;
}
public GenericFileProcessStrategy<T> getProcessStrategy() {
return processStrategy;
}
/**
* A pluggable org.apache.camel.component.file.GenericFileProcessStrategy allowing you to implement your own
* readLock option or similar. Can also be used when special conditions must be met before a file can be consumed,
* such as a special ready file exists. If this option is set then the readLock option does not apply.
*/
public void setProcessStrategy(GenericFileProcessStrategy<T> processStrategy) {
this.processStrategy = processStrategy;
}
public String getLocalWorkDirectory() {
return localWorkDirectory;
}
/**
* When consuming, a local work directory can be used to store the remote file content directly in local files, to
* avoid loading the content into memory. This is beneficial, if you consume a very big remote file and thus can
* conserve memory.
*/
public void setLocalWorkDirectory(String localWorkDirectory) {
this.localWorkDirectory = localWorkDirectory;
}
public int getMaxMessagesPerPoll() {
return maxMessagesPerPoll;
}
/**
* To define a maximum messages to gather per poll. By default no maximum is set. Can be used to set a limit of e.g.
* 1000 to avoid when starting up the server that there are thousands of files. Set a value of 0 or negative to
* disabled it. Notice: If this option is in use then the File and FTP components will limit before any sorting. For
* example if you have 100000 files and use maxMessagesPerPoll=500, then only the first 500 files will be picked up,
* and then sorted. You can use the eagerMaxMessagesPerPoll option and set this to false to allow to scan all files
* first and then sort afterwards.
*/
public void setMaxMessagesPerPoll(int maxMessagesPerPoll) {
this.maxMessagesPerPoll = maxMessagesPerPoll;
}
public boolean isEagerMaxMessagesPerPoll() {
return eagerMaxMessagesPerPoll;
}
/**
* Allows for controlling whether the limit from maxMessagesPerPoll is eager or not. If eager then the limit is
* during the scanning of files. Where as false would scan all files, and then perform sorting. Setting this option
* to false allows for sorting all files first, and then limit the poll. Mind that this requires a higher memory
* usage as all file details are in memory to perform the sorting.
*/
public void setEagerMaxMessagesPerPoll(boolean eagerMaxMessagesPerPoll) {
this.eagerMaxMessagesPerPoll = eagerMaxMessagesPerPoll;
}
public int getMaxDepth() {
return maxDepth;
}
/**
* The maximum depth to traverse when recursively processing a directory.
*/
public void setMaxDepth(int maxDepth) {
this.maxDepth = maxDepth;
}
public int getMinDepth() {
return minDepth;
}
/**
* The minimum depth to start processing when recursively processing a directory. Using minDepth=1 means the base
* directory. Using minDepth=2 means the first sub directory.
*/
public void setMinDepth(int minDepth) {
this.minDepth = minDepth;
}
public IdempotentRepository getInProgressRepository() {
return inProgressRepository;
}
/**
* A pluggable in-progress repository org.apache.camel.spi.IdempotentRepository. The in-progress repository is used
* to account the current in progress files being consumed. By default a memory based repository is used.
*/
public void setInProgressRepository(IdempotentRepository inProgressRepository) {
this.inProgressRepository = inProgressRepository;
}
public boolean isKeepLastModified() {
return keepLastModified;
}
/**
* Will keep the last modified timestamp from the source file (if any). Will use the
* FileConstants.FILE_LAST_MODIFIED header to located the timestamp. This header can contain either a java.util.Date
* or long with the timestamp. If the timestamp exists and the option is enabled it will set this timestamp on the
* written file. Note: This option only applies to the file producer. You cannot use this option with any of the ftp
* producers.
*/
public void setKeepLastModified(boolean keepLastModified) {
this.keepLastModified = keepLastModified;
}
public boolean isAllowNullBody() {
return allowNullBody;
}
/**
* Used to specify if a null body is allowed during file writing. If set to true then an empty file will be created,
* when set to false, and attempting to send a null body to the file component, a GenericFileWriteException of
* 'Cannot write null body to file.' will be thrown. If the `fileExist` option is set to 'Override', then the file
* will be truncated, and if set to `append` the file will remain unchanged.
*/
public void setAllowNullBody(boolean allowNullBody) {
this.allowNullBody = allowNullBody;
}
public boolean isJailStartingDirectory() {
return jailStartingDirectory;
}
/**
* Used for jailing (restricting) writing files to the starting directory (and sub) only. This is enabled by default
* to not allow Camel to write files to outside directories (to be more secured out of the box). You can turn this
* off to allow writing files to directories outside the starting directory, such as parent or root folders.
*/
public void setJailStartingDirectory(boolean jailStartingDirectory) {
this.jailStartingDirectory = jailStartingDirectory;
}
public String getAppendChars() {
return appendChars;
}
/**
* Used to append characters (text) after writing files. This can for example be used to add new lines or other
* separators when writing and appending to new files or existing files.
* <p/>
* To specify new-line (slash-n or slash-r) or tab (slash-t) characters then escape with an extra slash, eg
* slash-slash-n
*/
public void setAppendChars(String appendChars) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < appendChars.length(); i++) {
char ch = appendChars.charAt(i);
boolean escaped = '\\' == ch;
if (escaped && i < appendChars.length() - 1) {
// grab next character to escape
char next = appendChars.charAt(i + 1);
// special for new line, tabs and carriage return
if ('n' == next) {
sb.append("\n");
i++;
continue;
} else if ('t' == next) {
sb.append("\t");
i++;
continue;
} else if ('r' == next) {
sb.append("\r");
i++;
continue;
}
}
// not special just a regular character
sb.append(ch);
}
this.appendChars = sb.toString();
}
public ExceptionHandler getOnCompletionExceptionHandler() {
return onCompletionExceptionHandler;
}
/**
* To use a custom {@link org.apache.camel.spi.ExceptionHandler} to handle any thrown exceptions that happens during
* the file on completion process where the consumer does either a commit or rollback. The default implementation
* will log any exception at WARN level and ignore.
*/
public void setOnCompletionExceptionHandler(ExceptionHandler onCompletionExceptionHandler) {
this.onCompletionExceptionHandler = onCompletionExceptionHandler;
}
    /**
     * Whether the synchronous flag is enabled for this endpoint.
     */
    public boolean isSynchronous() {
        return synchronous;
    }
    /**
     * Sets the synchronous flag for this endpoint.
     * <p/>
     * NOTE(review): unlike the other options in this class this setter has no descriptive javadoc; presumably it
     * forces synchronous processing of exchanges - confirm against the component's option documentation.
     */
    public void setSynchronous(boolean synchronous) {
        this.synchronous = synchronous;
    }
public String getChecksumFileAlgorithm() {
return checksumFileAlgorithm;
}
/**
* If provided, then Camel will write a checksum file when the original file has been written. The checksum file
* will contain the checksum created with the provided algorithm for the original file. The checksum file will
* always be written in the same folder as the original file.
*/
public void setChecksumFileAlgorithm(String checksumFileAlgorithm) {
this.checksumFileAlgorithm = checksumFileAlgorithm;
}
    /**
     * Configures the given message with the file which sets the body to the file object.
     * <p/>
     * Also sets the {@link FileConstants#FILE_NAME} header: when <tt>flatten</tt> is enabled only the file name
     * without any path is used; otherwise the name is made relative to the endpoint's configured starting directory.
     *
     * @param file    the consumed file to expose as the message body
     * @param message the message to configure
     */
    public void configureMessage(GenericFile<T> file, Message message) {
        message.setBody(file);
        if (flatten) {
            // when flatten the file name should not contain any paths
            message.setHeader(FileConstants.FILE_NAME, file.getFileNameOnly());
        } else {
            // compute name to set on header that should be relative to starting
            // directory
            String name = file.isAbsolute() ? file.getAbsoluteFilePath() : file.getRelativeFilePath();
            // skip leading endpoint configured directory
            String endpointPath = getConfiguration().getDirectory() + getFileSeparator();
            // need to normalize paths to ensure we can match using startsWith
            endpointPath = FileUtil.normalizePath(endpointPath);
            String copyOfName = FileUtil.normalizePath(name);
            if (org.apache.camel.util.ObjectHelper.isNotEmpty(endpointPath) && copyOfName.startsWith(endpointPath)) {
                // NOTE(review): the substring uses the normalized path length against the un-normalized name -
                // assumes normalization only swaps separators and preserves length; confirm for exotic paths
                name = name.substring(endpointPath.length());
            }
            // adjust filename
            message.setHeader(FileConstants.FILE_NAME, name);
        }
    }
/**
* Set up the exchange properties with the options of the file endpoint
*/
@Override
public void configureExchange(Exchange exchange) {
// Now we just set the charset property here
if (getCharset() != null) {
exchange.setProperty(ExchangePropertyKey.CHARSET_NAME, getCharset());
}
}
/**
* Strategy to configure the move, preMove, or moveExisting option based on a String input.
*
* @param expression the original string input
* @return configured string or the original if no modifications is needed
*/
protected String configureMoveOrPreMoveExpression(String expression) {
// if the expression already have ${ } placeholders then pass it
// unmodified
if (isSimpleLanguage(expression)) {
return expression;
}
// remove trailing slash
expression = FileUtil.stripTrailingSeparator(expression);
StringBuilder sb = new StringBuilder(64);
// if relative then insert start with the parent folder
if (!isAbsolute(expression)) {
sb.append("${file:parent}");
sb.append(getFileSeparator());
}
// insert the directory the end user provided
sb.append(expression);
// append only the filename (file:name can contain a relative path, so
// we must use onlyname)
sb.append(getFileSeparator());
sb.append("${file:onlyname}");
return sb.toString();
}
protected Map<String, Object> getParamsAsMap() {
Map<String, Object> params = new HashMap<>();
if (isNoop()) {
params.put("noop", Boolean.toString(true));
}
if (isDelete()) {
params.put("delete", Boolean.toString(true));
}
if (move != null) {
params.put("move", move);
}
if (moveFailed != null) {
params.put("moveFailed", moveFailed);
}
if (preMove != null) {
params.put("preMove", preMove);
}
if (exclusiveReadLockStrategy != null) {
params.put("exclusiveReadLockStrategy", exclusiveReadLockStrategy);
}
if (readLock != null) {
params.put("readLock", readLock);
}
if ("idempotent".equals(readLock) || "idempotent-changed".equals(readLock) || "idempotent-rename".equals(readLock)) {
params.put("readLockIdempotentRepository", idempotentRepository);
}
if (readLockCheckInterval > 0) {
params.put("readLockCheckInterval", readLockCheckInterval);
}
if (readLockTimeout > 0) {
params.put("readLockTimeout", readLockTimeout);
}
params.put("readLockMarkerFile", readLockMarkerFile);
params.put("readLockDeleteOrphanLockFiles", readLockDeleteOrphanLockFiles);
params.put("readLockMinLength", readLockMinLength);
params.put("readLockLoggingLevel", readLockLoggingLevel);
params.put("readLockMinAge", readLockMinAge);
params.put("readLockRemoveOnRollback", readLockRemoveOnRollback);
params.put("readLockRemoveOnCommit", readLockRemoveOnCommit);
if (readLockIdempotentReleaseDelay > 0) {
params.put("readLockIdempotentReleaseDelay", readLockIdempotentReleaseDelay);
}
params.put("readLockIdempotentReleaseAsync", readLockIdempotentReleaseAsync);
if (readLockIdempotentReleaseAsyncPoolSize > 0) {
params.put("readLockIdempotentReleaseAsyncPoolSize", readLockIdempotentReleaseAsyncPoolSize);
}
if (readLockIdempotentReleaseExecutorService != null) {
params.put("readLockIdempotentReleaseExecutorService", readLockIdempotentReleaseExecutorService);
}
return params;
}
private Expression createFileLanguageExpression(String expression) {
Language language;
// only use file language if the name is complex (eg. using $)
if (expression.contains("$")) {
language = getCamelContext().resolveLanguage("file");
} else {
language = getCamelContext().resolveLanguage("constant");
}
return language.createExpression(expression);
}
private Predicate createFileLanguagePredicate(String expression) {
Language language = getCamelContext().resolveLanguage("file");
return language.createPredicate(expression);
}
    /**
     * Creates the associated name of the done file based on the given file name.
     * <p/>
     * This method should only be invoked if a done filename property has been set on this endpoint.
     *
     * @param  fileName the file name
     * @return          name of the associated done file name
     * @throws ExpressionIllegalSyntaxException if the done file pattern contains placeholders other than the
     *                                          supported file:name / file:name.noext ones
     */
    protected String createDoneFileName(String fileName) {
        String pattern = getDoneFileName();
        StringHelper.notEmpty(pattern, "doneFileName", pattern);
        // we only support ${file:name} or ${file:name.noext} as dynamic
        // placeholders for done files
        String path = FileUtil.onlyPath(fileName);
        // quote the replacement so regex-special characters in the file name are treated literally
        String onlyName = Matcher.quoteReplacement(FileUtil.stripPath(fileName));
        // note: the plain name patterns require a literal '}' after "name" so they do not accidentally
        // match the ".noext" variants replaced below; the '.' before noext is an unescaped regex dot
        pattern = pattern.replaceFirst("\\$\\{file:name\\}", onlyName);
        pattern = pattern.replaceFirst("\\$simple\\{file:name\\}", onlyName);
        pattern = pattern.replaceFirst("\\$\\{file:name.noext\\}", FileUtil.stripExt(onlyName, true));
        pattern = pattern.replaceFirst("\\$simple\\{file:name.noext\\}", FileUtil.stripExt(onlyName, true));
        // must be able to resolve all placeholders supported
        if (isSimpleLanguage(pattern)) {
            throw new ExpressionIllegalSyntaxException(fileName + ". Cannot resolve reminder: " + pattern);
        }
        String answer = pattern;
        if (org.apache.camel.util.ObjectHelper.isNotEmpty(path) && org.apache.camel.util.ObjectHelper.isNotEmpty(pattern)) {
            // done file must always be in same directory as the real file name
            answer = path + getFileSeparator() + pattern;
        }
        if (getConfiguration().needToNormalize()) {
            // must normalize path to cater for Windows and other OS
            answer = FileUtil.normalizePath(answer);
        }
        return answer;
    }
    /**
     * Is the given file a done file?
     * <p/>
     * This method should only be invoked if a done filename property has been set on this endpoint.
     *
     * @param  fileName the file name
     * @return          <tt>true</tt> if its a done file, <tt>false</tt> otherwise
     * @throws ExpressionIllegalSyntaxException if the done file pattern contains unsupported placeholders
     */
    protected boolean isDoneFile(String fileName) {
        String pattern = getDoneFileName();
        StringHelper.notEmpty(pattern, "doneFileName", pattern);
        if (!isSimpleLanguage(pattern)) {
            // no tokens, so just match names directly
            return pattern.equals(fileName);
        }
        // the static part of the pattern, is that a prefix or suffix?
        // it is a prefix if ${ start token is not at the start of the pattern
        boolean prefix = pattern.indexOf("${") > 0; // NOSONAR
        // remove dynamic parts of the pattern so we only got the static part
        // left
        pattern = pattern.replaceFirst("\\$\\{file:name\\}", "");
        pattern = pattern.replaceFirst("\\$simple\\{file:name\\}", "");
        pattern = pattern.replaceFirst("\\$\\{file:name.noext\\}", "");
        pattern = pattern.replaceFirst("\\$simple\\{file:name.noext\\}", "");
        // must be able to resolve all placeholders supported
        if (isSimpleLanguage(pattern)) {
            throw new ExpressionIllegalSyntaxException(fileName + ". Cannot resolve reminder: " + pattern);
        }
        // match the remaining static part against the file name as prefix or suffix
        if (prefix) {
            return fileName.startsWith(pattern);
        } else {
            return fileName.endsWith(pattern);
        }
    }
    /**
     * Whether the given pattern contains a simple-language start token (such as <tt>${</tt> or <tt>$simple{</tt>).
     */
    private static boolean isSimpleLanguage(String pattern) {
        return StringHelper.hasStartToken(pattern, "simple");
    }
    /**
     * Validates the endpoint configuration (read lock options and ANT filters) and initializes all configured
     * expressions with the CamelContext.
     *
     * @throws IllegalArgumentException if the read lock configuration is invalid
     */
    @Override
    protected void doInit() throws Exception {
        super.doInit();
        // validate that the read lock options is valid for the process strategy
        if (!"none".equals(readLock) && !"off".equals(readLock)) {
            if (readLockTimeout > 0 && readLockTimeout <= readLockCheckInterval) {
                // NOTE(review): this message recommends a 3x ratio while the readLockTimeout/readLockCheckInterval
                // javadoc in this class recommends "at least 2 or more times" - consider aligning the two
                throw new IllegalArgumentException(
                        "The option readLockTimeout must be higher than readLockCheckInterval" + ", was readLockTimeout="
                                                   + readLockTimeout
                                                   + ", readLockCheckInterval=" + readLockCheckInterval
                                                   + ". A good practice is to let the readLockTimeout be at least 3 times higher than the readLockCheckInterval"
                                                   + " to ensure that the read lock procedure has enough time to acquire the lock.");
            }
        }
        // NOTE(review): only plain "idempotent" is validated here; idempotent-changed and idempotent-rename
        // (see getParamsAsMap) also carry the idempotentRepository - confirm whether they should be checked too
        if ("idempotent".equals(readLock) && idempotentRepository == null) {
            throw new IllegalArgumentException("IdempotentRepository must be configured when using readLock=idempotent");
        }
        if ("fileLock".equals(readLock) && FileUtil.isWindows()) {
            throw new IllegalArgumentException("The readLock=fileLock option is not supported on Windows");
        }
        // build the ANT filter lazily from the include/exclude options
        if (antInclude != null) {
            if (antFilter == null) {
                antFilter = new AntFilter();
            }
            antFilter.setIncludes(antInclude);
        }
        if (antExclude != null) {
            if (antFilter == null) {
                antFilter = new AntFilter();
            }
            antFilter.setExcludes(antExclude);
        }
        if (antFilter != null) {
            antFilter.setCaseSensitive(antFilterCaseSensitive);
        }
        // initialize expressions
        if (fileName != null) {
            fileName.init(getCamelContext());
        }
        if (tempFileName != null) {
            tempFileName.init(getCamelContext());
        }
        if (move != null) {
            move.init(getCamelContext());
        }
        if (moveFailed != null) {
            moveFailed.init(getCamelContext());
        }
        if (preMove != null) {
            preMove.init(getCamelContext());
        }
        if (moveExisting != null) {
            moveExisting.init(getCamelContext());
        }
        if (idempotentKey != null) {
            idempotentKey.init(getCamelContext());
        }
    }
    /**
     * Starts the endpoint: registers the idempotent repository as a managed service and starts the in-progress
     * repository before delegating to the superclass.
     */
    @Override
    protected void doStart() throws Exception {
        // idempotent repository may be used by others, so add it as a service
        // so its stopped when CamelContext stops
        if (idempotentRepository != null) {
            getCamelContext().addService(idempotentRepository, true);
        }
        ServiceHelper.startService(inProgressRepository);
        super.doStart();
    }
    /**
     * Stops the endpoint first, then the in-progress repository service (reverse order of startup).
     */
    @Override
    protected void doStop() throws Exception {
        super.doStop();
        ServiceHelper.stopService(inProgressRepository);
    }
}
|
GenericFileEndpoint
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/util/LocaleUtilsTests.java
|
{
"start": 615,
"end": 2270
}
|
class ____ extends ESTestCase {
public void testIllegalLang() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> LocaleUtils.parse("yz"));
assertThat(e.getMessage(), Matchers.containsString("Unknown language: yz"));
e = expectThrows(IllegalArgumentException.class, () -> LocaleUtils.parse("yz-CA"));
assertThat(e.getMessage(), Matchers.containsString("Unknown language: yz"));
}
public void testIllegalCountry() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> LocaleUtils.parse("en-YZ"));
assertThat(e.getMessage(), Matchers.containsString("Unknown country: YZ"));
e = expectThrows(IllegalArgumentException.class, () -> LocaleUtils.parse("en-YZ-foobar"));
assertThat(e.getMessage(), Matchers.containsString("Unknown country: YZ"));
}
public void testIllegalNumberOfParts() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> LocaleUtils.parse("en-US-foo-bar"));
assertThat(e.getMessage(), Matchers.containsString("Locales can have at most 3 parts but got 4"));
}
public void testUnderscores() {
Locale locale1 = LocaleUtils.parse("fr_FR");
Locale locale2 = LocaleUtils.parse("fr-FR");
assertEquals(locale2, locale1);
}
public void testSimple() {
assertEquals(Locale.FRENCH, LocaleUtils.parse("fr"));
assertEquals(Locale.FRANCE, LocaleUtils.parse("fr-FR"));
assertEquals(Locale.ROOT, LocaleUtils.parse("root"));
assertEquals(Locale.ROOT, LocaleUtils.parse(""));
}
}
|
LocaleUtilsTests
|
java
|
elastic__elasticsearch
|
x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java
|
{
"start": 680,
"end": 2594
}
|
class ____ extends DockerEnvironmentAwareTestContainer {
private static final String DOCKER_BASE_IMAGE = "docker.elastic.co/elasticsearch-dev/openldap-fixture:1.0";
private final TemporaryFolder temporaryFolder = new TemporaryFolder();
private Path certsPath;
public OpenLdapTestContainer() {
this(Network.newNetwork());
}
public OpenLdapTestContainer(Network network) {
super(new RemoteDockerImage(DOCKER_BASE_IMAGE));
withNetworkAliases("openldap");
withNetwork(network);
withExposedPorts(389, 636);
}
public String getLdapUrl() {
return "ldaps://localhost:" + getMappedPort(636);
}
@Override
public void start() {
super.start();
setupCerts();
}
@Override
public void stop() {
super.stop();
temporaryFolder.delete();
}
private void setupCerts() {
try {
temporaryFolder.create();
certsPath = temporaryFolder.newFolder("certs").toPath();
copyResourceToFile(getClass(), certsPath, "openldap/certs/ca.jks");
copyResourceToFile(getClass(), certsPath, "openldap/certs/ca_server.key");
copyResourceToFile(getClass(), certsPath, "openldap/certs/ca_server.pem");
copyResourceToFile(getClass(), certsPath, "openldap/certs/dhparam.pem");
copyResourceToFile(getClass(), certsPath, "openldap/certs/ldap_server.key");
copyResourceToFile(getClass(), certsPath, "openldap/certs/ldap_server.pem");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public Path getJavaKeyStorePath() {
return certsPath.resolve("ca.jks");
}
public Path getCaCertPath() {
return certsPath.resolve("ca_server.pem");
}
public Integer getDefaultPort() {
return getMappedPort(636);
}
}
|
OpenLdapTestContainer
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/util/ClassUtilsTests.java
|
{
"start": 44582,
"end": 44746
}
|
class ____$$LambdaSupplier implements Supplier<String> {
@Override
public String get() {
return "fake lambda";
}
}
@SuppressWarnings("unused")
private
|
Fake
|
java
|
apache__kafka
|
connect/api/src/main/java/org/apache/kafka/connect/storage/Converter.java
|
{
"start": 1135,
"end": 1438
}
|
interface ____ support for translating between Kafka Connect's runtime data format
* and byte[]. Internally, this likely includes an intermediate step to the format used by the serialization
* layer (e.g. JsonNode, GenericRecord, Message).
* <p>Kafka Connect may discover implementations of this
|
provides
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng6071GetResourceWithCustomPom.java
|
{
"start": 1127,
"end": 1792
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* check when path to POM set by <code>-f ./pom.xml</code>
*
* @throws Exception in case of failure
*/
@Test
public void testRunCustomPomWithDot() throws Exception {
File testDir = extractResources("/mng-6071");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.addCliArgument("-f");
verifier.addCliArgument("./pom.xml");
verifier.setForkJvm(true); // TODO: why?
verifier.addCliArgument("verify");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
|
MavenITmng6071GetResourceWithCustomPom
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/JsonIteratorTest.java
|
{
"start": 1406,
"end": 1522
}
|
class ____ {
public int id;
public String name;
public Type type;
}
public static
|
Model
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/table/BuiltInTableFunction.java
|
{
"start": 1686,
"end": 2383
}
|
class ____ runtime implementation represented as {@link TableFunction} that is constructed
* from {@link BuiltInFunctionDefinition#specialize(SpecializedContext)}.
*
* <p>Subclasses must offer a constructor that takes {@link SpecializedContext} if they are
* constructed from a {@link BuiltInFunctionDefinition}. Otherwise the {@link
* #BuiltInTableFunction()} constructor might be more appropriate.
*
* <p>By default, all built-in functions work on internal data structures. However, this can be
* changed by overriding {@link #getArgumentDataTypes()} and {@link #getOutputDataType()}. Or by
* overriding {@link #getTypeInference(DataTypeFactory)} directly.
*/
@Internal
public abstract
|
for
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Olingo4EndpointBuilderFactory.java
|
{
"start": 69725,
"end": 70047
}
|
class ____ extends AbstractEndpointBuilder implements Olingo4EndpointBuilder, AdvancedOlingo4EndpointBuilder {
public Olingo4EndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new Olingo4EndpointBuilderImpl(path);
}
}
|
Olingo4EndpointBuilderImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/ToOneAttributeMapping.java
|
{
"start": 14697,
"end": 14784
}
|
class ____{
@OneToMany( mappedBy = "card")
Set<CardField> fields;
}
|
Card
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/form/FormParamTest.java
|
{
"start": 598,
"end": 2191
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(FormClient.class, SubFormClient.class, Resource.class, Mode.class));
@TestHTTPResource
URI baseUri;
@Test
void shouldPassFormParam() {
FormClient formClient = RestClientBuilder.newBuilder().baseUri(baseUri).build(FormClient.class);
String result = formClient.directForm("par1", "par 2");
assertThat(result).isEqualTo("root formParam1:par1,formParam2:par 2");
}
@Test
void nullFormParamShouldBeAllowed() {
FormClient formClient = RestClientBuilder.newBuilder().baseUri(baseUri).build(FormClient.class);
String result = formClient.directForm("par1", null);
assertThat(result).isEqualTo("root formParam1:par1,formParam2:null");
}
@Test
void shouldPassFormParamFromSubResource() {
FormClient formClient = RestClientBuilder.newBuilder().baseUri(baseUri).build(FormClient.class);
String result = formClient.subForm("par1", "par 2").form("spar1", "spar 2");
assertThat(result).isEqualTo("sub rootParam1:par1,rootParam2:par 2,subParam1:spar1,subParam2:spar 2");
}
@Test
void shouldSupportParsingDifferentTypes() {
FormClient formClient = RestClientBuilder.newBuilder().baseUri(baseUri).build(FormClient.class);
String result = formClient.withTypes("a", 1, 2, Mode.On);
assertThat(result).isEqualTo("root text:a,number:1,wrapNumber:2,mode:On");
}
public
|
FormParamTest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java
|
{
"start": 5534,
"end": 19382
}
|
class ____ Hadoop's writable */
private static final String HADOOP_WRITABLE_CLASS = "org.apache.hadoop.io.Writable";
private static final String HADOOP_WRITABLE_TYPEINFO_CLASS =
"org.apache.flink.api.java.typeutils.WritableTypeInfo";
private static final Logger LOG = LoggerFactory.getLogger(TypeExtractor.class);
private static final String GENERIC_TYPE_DOC_HINT =
"Please read the Flink documentation on \"Data Types & Serialization\" for details of the effect on performance and schema evolution.";
public static final int[] NO_INDEX = new int[] {};
protected TypeExtractor() {
// only create instances for special use cases
}
// --------------------------------------------------------------------------------------------
// TypeInfoFactory registry
// --------------------------------------------------------------------------------------------
private static final Map<Type, Class<? extends TypeInfoFactory<?>>>
registeredTypeInfoFactories = new HashMap<>();
/**
* Registers a type information factory globally for a certain type. Every following type
* extraction operation will use the provided factory for this type. The factory will have the
* highest precedence for this type. In a hierarchy of types the registered factory has higher
* precedence than annotations at the same level but lower precedence than factories defined
* down the hierarchy.
*
* @param t type for which a new factory is registered
* @param factory type information factory that will produce {@link TypeInformation}
*/
@Internal
public static void registerFactory(Type t, Class<? extends TypeInfoFactory<?>> factory) {
Preconditions.checkNotNull(t, "Type parameter must not be null.");
Preconditions.checkNotNull(factory, "Factory parameter must not be null.");
if (!TypeInfoFactory.class.isAssignableFrom(factory)) {
throw new IllegalArgumentException("Class is not a TypeInfoFactory.");
}
if (registeredTypeInfoFactories.containsKey(t)) {
throw new InvalidTypesException(
"A TypeInfoFactory for type '" + t + "' is already registered.");
}
registeredTypeInfoFactories.put(t, factory);
}
// --------------------------------------------------------------------------------------------
// Function specific methods
// --------------------------------------------------------------------------------------------
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes(
MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) {
return getMapReturnTypes(mapInterface, inType, null, false);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes(
MapFunction<IN, OUT> mapInterface,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
mapInterface,
MapFunction.class,
0,
1,
NO_INDEX,
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getFlatMapReturnTypes(
FlatMapFunction<IN, OUT> flatMapInterface, TypeInformation<IN> inType) {
return getFlatMapReturnTypes(flatMapInterface, inType, null, false);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getFlatMapReturnTypes(
FlatMapFunction<IN, OUT> flatMapInterface,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
flatMapInterface,
FlatMapFunction.class,
0,
1,
new int[] {1, 0},
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, ACC> TypeInformation<ACC> getAggregateFunctionAccumulatorType(
AggregateFunction<IN, ACC, ?> function,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
function,
AggregateFunction.class,
0,
1,
NO_INDEX,
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getAggregateFunctionReturnType(
AggregateFunction<IN, ?, OUT> function,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
function,
AggregateFunction.class,
0,
2,
NO_INDEX,
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapPartitionReturnTypes(
MapPartitionFunction<IN, OUT> mapPartitionInterface, TypeInformation<IN> inType) {
return getMapPartitionReturnTypes(mapPartitionInterface, inType, null, false);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapPartitionReturnTypes(
MapPartitionFunction<IN, OUT> mapPartitionInterface,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
mapPartitionInterface,
MapPartitionFunction.class,
0,
1,
new int[] {1, 0},
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getGroupReduceReturnTypes(
GroupReduceFunction<IN, OUT> groupReduceInterface, TypeInformation<IN> inType) {
return getGroupReduceReturnTypes(groupReduceInterface, inType, null, false);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getGroupReduceReturnTypes(
GroupReduceFunction<IN, OUT> groupReduceInterface,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
groupReduceInterface,
GroupReduceFunction.class,
0,
1,
new int[] {1, 0},
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getGroupCombineReturnTypes(
GroupCombineFunction<IN, OUT> combineInterface, TypeInformation<IN> inType) {
return getGroupCombineReturnTypes(combineInterface, inType, null, false);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getGroupCombineReturnTypes(
GroupCombineFunction<IN, OUT> combineInterface,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
combineInterface,
GroupCombineFunction.class,
0,
1,
new int[] {1, 0},
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getFlatJoinReturnTypes(
FlatJoinFunction<IN1, IN2, OUT> joinInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type) {
return getFlatJoinReturnTypes(joinInterface, in1Type, in2Type, null, false);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getFlatJoinReturnTypes(
FlatJoinFunction<IN1, IN2, OUT> joinInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type,
String functionName,
boolean allowMissing) {
return getBinaryOperatorReturnType(
joinInterface,
FlatJoinFunction.class,
0,
1,
2,
new int[] {2, 0},
in1Type,
in2Type,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getJoinReturnTypes(
JoinFunction<IN1, IN2, OUT> joinInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type) {
return getJoinReturnTypes(joinInterface, in1Type, in2Type, null, false);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getJoinReturnTypes(
JoinFunction<IN1, IN2, OUT> joinInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type,
String functionName,
boolean allowMissing) {
return getBinaryOperatorReturnType(
joinInterface,
JoinFunction.class,
0,
1,
2,
NO_INDEX,
in1Type,
in2Type,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getCoGroupReturnTypes(
CoGroupFunction<IN1, IN2, OUT> coGroupInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type) {
return getCoGroupReturnTypes(coGroupInterface, in1Type, in2Type, null, false);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getCoGroupReturnTypes(
CoGroupFunction<IN1, IN2, OUT> coGroupInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type,
String functionName,
boolean allowMissing) {
return getBinaryOperatorReturnType(
coGroupInterface,
CoGroupFunction.class,
0,
1,
2,
new int[] {2, 0},
in1Type,
in2Type,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getCrossReturnTypes(
CrossFunction<IN1, IN2, OUT> crossInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type) {
return getCrossReturnTypes(crossInterface, in1Type, in2Type, null, false);
}
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getCrossReturnTypes(
CrossFunction<IN1, IN2, OUT> crossInterface,
TypeInformation<IN1> in1Type,
TypeInformation<IN2> in2Type,
String functionName,
boolean allowMissing) {
return getBinaryOperatorReturnType(
crossInterface,
CrossFunction.class,
0,
1,
2,
NO_INDEX,
in1Type,
in2Type,
functionName,
allowMissing);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getKeySelectorTypes(
KeySelector<IN, OUT> selectorInterface, TypeInformation<IN> inType) {
return getKeySelectorTypes(selectorInterface, inType, null, false);
}
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getKeySelectorTypes(
KeySelector<IN, OUT> selectorInterface,
TypeInformation<IN> inType,
String functionName,
boolean allowMissing) {
return getUnaryOperatorReturnType(
selectorInterface,
KeySelector.class,
0,
1,
NO_INDEX,
inType,
functionName,
allowMissing);
}
@PublicEvolving
public static <T> TypeInformation<T> getPartitionerTypes(Partitioner<T> partitioner) {
return getPartitionerTypes(partitioner, null, false);
}
@PublicEvolving
public static <T> TypeInformation<T> getPartitionerTypes(
Partitioner<T> partitioner, String functionName, boolean allowMissing) {
return getUnaryOperatorReturnType(
partitioner,
Partitioner.class,
-1,
0,
new int[] {0},
null,
functionName,
allowMissing);
}
@SuppressWarnings("unchecked")
@PublicEvolving
public static <IN> TypeInformation<IN> getInputFormatTypes(
InputFormat<IN, ?> inputFormatInterface) {
if (inputFormatInterface instanceof ResultTypeQueryable) {
return ((ResultTypeQueryable<IN>) inputFormatInterface).getProducedType();
}
return new TypeExtractor()
.privateCreateTypeInfo(
InputFormat.class, inputFormatInterface.getClass(), 0, null, null);
}
// --------------------------------------------------------------------------------------------
// Generic extraction methods
// --------------------------------------------------------------------------------------------
/**
* Returns the unary operator's return type.
*
* <p>This method can extract a type in 4 different ways:
*
* <p>1. By using the generics of the base
|
representing
|
java
|
apache__camel
|
components/camel-joor/src/test/java/org/apache/camel/language/joor/CompilationUnitTest.java
|
{
"start": 2947,
"end": 3194
}
|
class ____ {
void outerClassMethod() {
Object o = new Object(){};
}
}
""",
"1");
}
}
|
OuterClass
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CpsDoesNotUseSkipUnavailableIT.java
|
{
"start": 1661,
"end": 5932
}
|
class ____ extends Plugin implements ClusterPlugin {
@Override
public List<Setting<?>> getSettings() {
return List.of(CpsEnableSetting);
}
}
private static final Setting<String> CpsEnableSetting = Setting.simpleString(
"serverless.cross_project.enabled",
Setting.Property.NodeScope
);
@Override
protected List<String> remoteClusterAlias() {
return List.of(LINKED_CLUSTER_1);
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
return CollectionUtils.appendToCopy(super.nodePlugins(clusterAlias), CpsPlugin.class);
}
@Override
protected Settings nodeSettings() {
return Settings.builder().put(super.nodeSettings()).put("serverless.cross_project.enabled", "true").build();
}
public void testCpsShouldNotUseSkipUnavailable() throws Exception {
// Add some dummy data to prove we are communicating fine with the remote.
assertAcked(client(LINKED_CLUSTER_1).admin().indices().prepareCreate("test-index"));
client(LINKED_CLUSTER_1).prepareIndex("test-index").setSource("sample-field", "sample-value").get();
client(LINKED_CLUSTER_1).admin().indices().prepareRefresh("test-index").get();
// Shut down the linked cluster we'd be targeting in the search.
try {
cluster(LINKED_CLUSTER_1).close();
} catch (Exception e) {
throw new AssertionError(e);
}
// 1. We first execute a search request with partial results allowed and shouldn't observe any top-level errors.
{
var searchRequest = getSearchRequest(true);
searchRequest.setCcsMinimizeRoundtrips(randomBoolean());
assertResponse(client().execute(TransportSearchAction.TYPE, searchRequest), result -> {
var originCluster = result.getClusters().getCluster(LOCAL_CLUSTER);
assertThat(originCluster.getStatus(), Matchers.is(SearchResponse.Cluster.Status.SUCCESSFUL));
var linkedCluster = result.getClusters().getCluster(LINKED_CLUSTER_1);
assertThat(linkedCluster.getStatus(), Matchers.is(SearchResponse.Cluster.Status.SKIPPED));
var linkedClusterFailures = result.getClusters().getCluster(LINKED_CLUSTER_1).getFailures();
assertThat(linkedClusterFailures.size(), Matchers.is(1));
// Failure is something along the lines of shard failure and is caused by a connection error.
assertThat(
linkedClusterFailures.getFirst().getCause(),
Matchers.anyOf(
Matchers.instanceOf(RemoteTransportException.class),
Matchers.instanceOf(ConnectTransportException.class)
)
);
});
}
// 2. We now execute a search request with partial results disallowed and should observe a top-level error.
{
var searchRequest = getSearchRequest(false);
searchRequest.setCcsMinimizeRoundtrips(randomBoolean());
var ae = expectThrows(AssertionError.class, () -> safeGet(client().execute(TransportSearchAction.TYPE, searchRequest)));
assertThat(ae.getCause(), Matchers.instanceOf(ExecutionException.class));
assertThat(
ae.getCause().getCause(),
Matchers.anyOf(Matchers.instanceOf(RemoteTransportException.class), Matchers.instanceOf(ConnectTransportException.class))
);
}
/*
* We usually get a top-level error when skip_unavailable is false. However, irrespective of that setting in this test, we now
* observe a top-level error when partial results are disallowed. This proves that skip_unavailable's scope has now shifted to
* allow_partial_search_results in CPS environment.
*/
}
private SearchRequest getSearchRequest(boolean allowPartialResults) {
// Include both origin and linked cluster in the search op.
var searchRequest = new SearchRequest("*", "*:*");
searchRequest.allowPartialSearchResults(allowPartialResults);
return searchRequest;
}
}
|
CpsPlugin
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/filters/PropertyPathTest.java
|
{
"start": 1204,
"end": 1574
}
|
class ____ implements PropertyPreFilter {
public boolean apply(JSONSerializer serializer, Object source, String name) {
String path = serializer.getContext().toString() + "." + name;
if (path.startsWith("$.b.d")) {
return false;
}
return true;
}
}
public static
|
MyPropertyPreFilter
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/logging/NacosLoggingAdapter.java
|
{
"start": 732,
"end": 2002
}
|
interface ____ {
/**
* Whether current adapter is adapted for specified logger class.
*
* @param loggerClass {@link org.slf4j.Logger} implementation class
* @return {@code true} if current adapter can adapt this {@link org.slf4j.Logger} implementation, otherwise {@code
* false}
*/
boolean isAdaptedLogger(Class<?> loggerClass);
/**
* Load Nacos logging configuration into log context.
*
* @param loggingProperties logging properties
*/
void loadConfiguration(NacosLoggingProperties loggingProperties);
/**
* Whether need reload configuration into log context.
*
* @return {@code true} when context don't contain nacos logging configuration. otherwise {@code false}
*/
boolean isNeedReloadConfiguration();
/**
* Get current logging default config location.
*
* @return default config location
*/
String getDefaultConfigLocation();
/**
* Whether current adapter enabled, design for users which want to log nacos client into app logs.
*
* @return {@code true} when enabled, otherwise {@code false}, default {@code true}
*/
default boolean isEnabled() {
return true;
}
}
|
NacosLoggingAdapter
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/dualvalue/DualValue_enumValues_Test.java
|
{
"start": 1044,
"end": 2782
}
|
class ____ {
private static final List<String> PATH = list("foo", "bar");
@Test
void isExpectedAnEnum_should_return_true_when_expected_is_an_enum() {
// GIVEN
DualValue dualValue = new DualValue(PATH, "", BLUE);
// WHEN
boolean isExpectedAnEnum = dualValue.isExpectedAnEnum();
// THEN
then(isExpectedAnEnum).isTrue();
}
@Test
void isExpectedAnEnum_should_return_false_when_expected_is_not_an_enum() {
// GIVEN
DualValue dualValue = new DualValue(PATH, RED, "");
// WHEN
boolean isExpectedAnEnum = dualValue.isExpectedAnEnum();
// THEN
then(isExpectedAnEnum).isFalse();
}
@Test
void isExpectedAnEnum_should_return_false_when_expected_is_null() {
// GIVEN
DualValue dualValue = new DualValue(PATH, RED, null);
// WHEN
boolean isExpectedAnEnum = dualValue.isExpectedAnEnum();
// THEN
then(isExpectedAnEnum).isFalse();
}
@Test
void isActualAnEnum_should_return_true_when_actual_is_an_enum() {
// GIVEN
DualValue dualValue = new DualValue(PATH, BLUE, "");
// WHEN
boolean isActualAnEnum = dualValue.isActualAnEnum();
// THEN
then(isActualAnEnum).isTrue();
}
@Test
void isActualAnEnum_should_return_false_when_actual_is_not_an_enum() {
// GIVEN
DualValue dualValue = new DualValue(PATH, "", RED);
// WHEN
boolean isActualAnEnum = dualValue.isActualAnEnum();
// THEN
then(isActualAnEnum).isFalse();
}
@Test
void isActualAnEnum_should_return_false_when_actual_is_null() {
// GIVEN
DualValue dualValue = new DualValue(PATH, null, "");
// WHEN
boolean actualIsEnum = dualValue.isActualAnEnum();
// THEN
then(actualIsEnum).isFalse();
}
}
|
DualValue_enumValues_Test
|
java
|
apache__camel
|
components/camel-consul/src/main/java/org/apache/camel/component/consul/ConsulRegistry.java
|
{
"start": 8483,
"end": 8841
}
|
class
____.putValue(key, value);
// store just as a bookmark
kvClient.putValue(asLongKey(object, key), "1");
kvClient.releaseLock(lockKey, sessionId);
}
private static String asLongKey(Object object, String key) {
return object.getClass().getName().replace('$', '/') + "/" + key;
}
public static
|
kvClient
|
java
|
google__guice
|
core/src/com/google/inject/internal/RealMapBinder.java
|
{
"start": 29461,
"end": 31402
}
|
class ____<K, V>
extends RealMapBinderProviderWithDependencies<K, V, Map<K, Provider<V>>> {
private ImmutableMap<K, Provider<V>> mapOfProviders;
private ImmutableSet<Dependency<?>> dependencies = RealMapBinder.MODULE_DEPENDENCIES;
private RealProviderMapProvider(BindingSelection<K, V> bindingSelection) {
super(bindingSelection);
}
@Override
public Set<Dependency<?>> getDependencies() {
return dependencies;
}
@Override
protected void doInitialize(InjectorImpl injector, Errors errors) {
ImmutableMap.Builder<K, Provider<V>> mapOfProvidersBuilder = ImmutableMap.builder();
ImmutableSet.Builder<Dependency<?>> dependenciesBuilder = ImmutableSet.builder();
for (Map.Entry<K, Binding<V>> entry : bindingSelection.getMapBindings().entrySet()) {
mapOfProvidersBuilder.put(entry.getKey(), entry.getValue().getProvider());
dependenciesBuilder.add(Dependency.get(getKeyOfProvider(entry.getValue().getKey())));
}
mapOfProviders = mapOfProvidersBuilder.buildOrThrow();
dependencies = dependenciesBuilder.build();
}
@Override
protected Map<K, Provider<V>> doProvision(InternalContext context, Dependency<?> dependency) {
return mapOfProviders;
}
@Override
protected Provider<Map<K, Provider<V>>> doMakeProvider(
InjectorImpl injector, Dependency<?> dependency) {
return InternalFactory.makeProviderFor(mapOfProviders, this);
}
@Override
protected MethodHandle doGetHandle(LinkageContext context) {
return InternalMethodHandles.constantFactoryGetHandle(mapOfProviders);
}
}
/**
* Implementation of a provider instance for the map that also exposes details about the MapBinder
* using the extension SPI, delegating to another provider instance for non-extension (e.g, the
* actual provider instance info) data.
*/
private static final
|
RealProviderMapProvider
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/inheritance/ScopeInheritanceStereotypeTest.java
|
{
"start": 1019,
"end": 1147
}
|
class ____ {
public void ping() {
}
}
@Model
// should inherit @ApplicationScoped
static
|
SuperBean
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isBeforeOrEqualTo_Test.java
|
{
"start": 1294,
"end": 2889
}
|
class ____ extends AbstractZonedDateTimeAssertBaseTest {
@Override
protected ZonedDateTimeAssert invoke_api_method() {
return assertions.isBeforeOrEqualTo(NOW)
.isBeforeOrEqualTo(TOMORROW.toString());
}
@Override
protected void verify_internal_effects() {
verify(comparables).assertIsBeforeOrEqualTo(getInfo(assertions), getActual(assertions), NOW);
verify(comparables).assertIsBeforeOrEqualTo(getInfo(assertions), getActual(assertions), TOMORROW);
}
@Test
void should_fail_if_zonedDateTime_parameter_is_null() {
// GIVEN
ZonedDateTime otherZonedDateTime = null;
// WHEN
ThrowingCallable code = () -> assertThat(NOW).isBeforeOrEqualTo(otherZonedDateTime);
// THEN
thenIllegalArgumentException().isThrownBy(code)
.withMessage("The ZonedDateTime to compare actual with should not be null");
}
@Test
void should_fail_if_zonedDateTime_as_string_parameter_is_null() {
// GIVEN
String otherZonedDateTimeAsString = null;
// WHEN
ThrowingCallable code = () -> assertThat(NOW).isBeforeOrEqualTo(otherZonedDateTimeAsString);
// THEN
thenIllegalArgumentException().isThrownBy(code)
.withMessage("The String representing the ZonedDateTime to compare actual with should not be null");
}
@Test
void should_fail_if_given_string_parameter_cant_be_parsed() {
assertThatThrownBy(() -> assertions.isBeforeOrEqualTo("not a ZonedDateTime")).isInstanceOf(DateTimeParseException.class);
}
}
|
ZonedDateTimeAssert_isBeforeOrEqualTo_Test
|
java
|
quarkusio__quarkus
|
integration-tests/injectmock/src/test/java/io/quarkus/it/mockbean/RequestScopedSpyTest.java
|
{
"start": 688,
"end": 1037
}
|
class ____ {
@InjectSpy
private RequestBean spiedBean;
@Inject
private SomeOtherBean injectedBean;
@Test
void verifySpyWorks() {
// Executes gracefully
assertNotNull(spiedBean);
injectedBean.pong();
Mockito.verify(spiedBean, Mockito.times(1)).ping();
}
@Nested
|
RequestScopedSpyTest
|
java
|
greenrobot__greendao
|
DaoCore/src/main/java/org/greenrobot/greendao/query/WhereCondition.java
|
{
"start": 1181,
"end": 2159
}
|
class ____ implements WhereCondition {
protected final boolean hasSingleValue;
protected final Object value;
protected final Object[] values;
public AbstractCondition() {
hasSingleValue = false;
value = null;
values = null;
}
public AbstractCondition(Object value) {
this.value = value;
hasSingleValue = true;
values = null;
}
public AbstractCondition(Object[] values) {
this.value = null;
hasSingleValue = false;
this.values = values;
}
@Override
public void appendValuesTo(List<Object> valuesTarget) {
if (hasSingleValue) {
valuesTarget.add(value);
} else if (values != null) {
for (Object value : values) {
valuesTarget.add(value);
}
}
}
}
|
AbstractCondition
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/clause/OracleStorageClause.java
|
{
"start": 863,
"end": 5008
}
|
class ____ extends OracleSQLObjectImpl {
private SQLExpr initial;
private SQLExpr next;
private SQLExpr minExtents;
private SQLExpr maxExtents;
private SQLExpr maxSize;
private SQLExpr pctIncrease;
private SQLExpr freeLists;
private SQLExpr freeListGroups;
private SQLExpr bufferPool;
private SQLExpr objno;
private FlashCacheType flashCache;
private FlashCacheType cellFlashCache;
public OracleStorageClause clone() {
OracleStorageClause x = new OracleStorageClause();
if (initial != null) {
x.setInitial(initial.clone());
}
if (next != null) {
x.setNext(next.clone());
}
if (minExtents != null) {
x.setMinExtents(minExtents.clone());
}
if (maxExtents != null) {
x.setMinExtents(maxExtents.clone());
}
if (maxSize != null) {
x.setMaxSize(maxSize.clone());
}
if (pctIncrease != null) {
x.setPctIncrease(pctIncrease.clone());
}
if (freeLists != null) {
x.setFreeLists(freeLists.clone());
}
if (freeListGroups != null) {
x.setFreeListGroups(freeListGroups.clone());
}
if (bufferPool != null) {
x.setBufferPool(bufferPool.clone());
}
if (objno != null) {
x.setObjno(objno.clone());
}
x.flashCache = flashCache;
x.cellFlashCache = cellFlashCache;
return x;
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, initial);
acceptChild(visitor, next);
acceptChild(visitor, minExtents);
acceptChild(visitor, maxExtents);
acceptChild(visitor, maxSize);
acceptChild(visitor, pctIncrease);
acceptChild(visitor, freeLists);
acceptChild(visitor, freeListGroups);
acceptChild(visitor, bufferPool);
acceptChild(visitor, objno);
}
visitor.endVisit(this);
}
public SQLExpr getMaxSize() {
return maxSize;
}
public void setMaxSize(SQLExpr maxSize) {
this.maxSize = maxSize;
}
public FlashCacheType getFlashCache() {
return flashCache;
}
public void setFlashCache(FlashCacheType flashCache) {
this.flashCache = flashCache;
}
public FlashCacheType getCellFlashCache() {
return cellFlashCache;
}
public void setCellFlashCache(FlashCacheType cellFlashCache) {
this.cellFlashCache = cellFlashCache;
}
public SQLExpr getPctIncrease() {
return pctIncrease;
}
public void setPctIncrease(SQLExpr pctIncrease) {
this.pctIncrease = pctIncrease;
}
public SQLExpr getNext() {
return next;
}
public void setNext(SQLExpr next) {
this.next = next;
}
public SQLExpr getMinExtents() {
return minExtents;
}
public void setMinExtents(SQLExpr minExtents) {
this.minExtents = minExtents;
}
public SQLExpr getMaxExtents() {
return maxExtents;
}
public void setMaxExtents(SQLExpr maxExtents) {
this.maxExtents = maxExtents;
}
public SQLExpr getObjno() {
return objno;
}
public void setObjno(SQLExpr objno) {
this.objno = objno;
}
public SQLExpr getInitial() {
return initial;
}
public void setInitial(SQLExpr initial) {
this.initial = initial;
}
public SQLExpr getFreeLists() {
return freeLists;
}
public void setFreeLists(SQLExpr freeLists) {
this.freeLists = freeLists;
}
public SQLExpr getFreeListGroups() {
return freeListGroups;
}
public void setFreeListGroups(SQLExpr freeListGroups) {
this.freeListGroups = freeListGroups;
}
public SQLExpr getBufferPool() {
return bufferPool;
}
public void setBufferPool(SQLExpr bufferPool) {
this.bufferPool = bufferPool;
}
public static
|
OracleStorageClause
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/OpenAPIDocumentPublisher.java
|
{
"start": 1077,
"end": 1212
}
|
interface ____ extends OpenAPIExtension {
void publish(Function<OpenAPIRequest, Pair<OpenAPI, String>> fn);
}
|
OpenAPIDocumentPublisher
|
java
|
grpc__grpc-java
|
xds/src/main/java/io/grpc/xds/internal/security/SecurityProtocolNegotiators.java
|
{
"start": 4183,
"end": 4801
}
|
class ____ implements InternalProtocolNegotiator.ClientFactory {
private final InternalProtocolNegotiator.ClientFactory fallbackProtocolNegotiator;
private ClientFactory(InternalProtocolNegotiator.ClientFactory fallbackNegotiator) {
this.fallbackProtocolNegotiator = fallbackNegotiator;
}
@Override
public ProtocolNegotiator newNegotiator() {
return new ClientSecurityProtocolNegotiator(fallbackProtocolNegotiator.newNegotiator());
}
@Override
public int getDefaultPort() {
return GrpcUtil.DEFAULT_PORT_SSL;
}
}
@VisibleForTesting
static final
|
ClientFactory
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/test/groovy/io/micronaut/http/server/netty/interceptor/SecureController.java
|
{
"start": 994,
"end": 1148
}
|
class ____ {
@Get
public HttpResponse index(String username) {
return HttpResponse.ok("Authenticated: " + username);
}
}
|
SecureController
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.