language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/FileJsonFormatterDefaultConfigTest.java | {
"start": 793,
"end": 2780
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("application-file-json-formatter-default.properties");
@Test
public void jsonFormatterDefaultConfigurationTest() {
JsonFormatter jsonFormatter = getJsonFormatter();
assertThat(jsonFormatter.isPrettyPrint()).isFalse();
assertThat(jsonFormatter.getDateTimeFormatter().toString())
.isEqualTo(DateTimeFormatter.ISO_OFFSET_DATE_TIME.withZone(ZoneId.systemDefault()).toString());
assertThat(jsonFormatter.getDateTimeFormatter().getZone()).isEqualTo(ZoneId.systemDefault());
assertThat(jsonFormatter.getExceptionOutputType()).isEqualTo(StructuredFormatter.ExceptionOutputType.DETAILED);
assertThat(jsonFormatter.getRecordDelimiter()).isEqualTo("\n");
assertThat(jsonFormatter.isPrintDetails()).isFalse();
assertThat(jsonFormatter.getExcludedKeys()).isEmpty();
assertThat(jsonFormatter.getAdditionalFields().entrySet()).isEmpty();
}
public static JsonFormatter getJsonFormatter() {
LogManager logManager = LogManager.getLogManager();
assertThat(logManager).isInstanceOf(org.jboss.logmanager.LogManager.class);
QuarkusDelayedHandler delayedHandler = InitialConfigurator.DELAYED_HANDLER;
assertThat(Logger.getLogger("").getHandlers()).contains(delayedHandler);
assertThat(delayedHandler.getLevel()).isEqualTo(Level.ALL);
Handler handler = Arrays.stream(delayedHandler.getHandlers())
.filter(h -> (h instanceof SizeRotatingFileHandler))
.findFirst().orElse(null);
assertThat(handler).isNotNull();
assertThat(handler.getLevel()).isEqualTo(Level.WARNING);
Formatter formatter = handler.getFormatter();
assertThat(formatter).isInstanceOf(JsonFormatter.class);
return (JsonFormatter) formatter;
}
}
| FileJsonFormatterDefaultConfigTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/metrics/ReporterSetup.java | {
"start": 1261,
"end": 2340
} | class ____ extends AbstractReporterSetup<MetricReporter, Metric> {
public ReporterSetup(
final String name,
final MetricConfig configuration,
MetricReporter reporter,
final ReporterFilter<Metric> filter,
final Map<String, String> additionalVariables) {
super(name, configuration, reporter, filter, additionalVariables);
}
public Optional<String> getIntervalSettings() {
return Optional.ofNullable(
configuration.getString(MetricOptions.REPORTER_INTERVAL.key(), null));
}
@Override
public Optional<String> getDelimiter() {
return Optional.ofNullable(
configuration.getString(MetricOptions.REPORTER_SCOPE_DELIMITER.key(), null));
}
@Override
protected ConfigOption<String> getDelimiterConfigOption() {
return MetricOptions.REPORTER_SCOPE_DELIMITER;
}
@Override
protected ConfigOption<String> getExcludedVariablesConfigOption() {
return MetricOptions.REPORTER_EXCLUDED_VARIABLES;
}
}
| ReporterSetup |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/client/RegisteredClientTests.java | {
"start": 1286,
"end": 19877
} | class ____ {
private static final String ID = "registration-1";
private static final String CLIENT_ID = "client-1";
private static final String CLIENT_SECRET = "secret";
private static final Set<String> REDIRECT_URIS = Collections.singleton("https://example.com");
private static final Set<String> POST_LOGOUT_REDIRECT_URIS = Collections
.singleton("https://example.com/oidc-post-logout");
private static final Set<String> SCOPES = Collections
.unmodifiableSet(Stream.of("openid", "profile", "email").collect(Collectors.toSet()));
private static final Set<ClientAuthenticationMethod> CLIENT_AUTHENTICATION_METHODS = Collections
.singleton(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
@Test
public void buildWhenAuthorizationGrantTypesNotSetThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.build());
}
@Test
public void buildWhenAllAttributesProvidedThenAllAttributesAreSet() {
Instant clientIdIssuedAt = Instant.now();
Instant clientSecretExpiresAt = clientIdIssuedAt.plus(30, ChronoUnit.DAYS);
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientIdIssuedAt(clientIdIssuedAt)
.clientSecret(CLIENT_SECRET)
.clientSecretExpiresAt(clientSecretExpiresAt)
.clientName("client-name")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.postLogoutRedirectUris(
(postLogoutRedirectUris) -> postLogoutRedirectUris.addAll(POST_LOGOUT_REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getId()).isEqualTo(ID);
assertThat(registration.getClientId()).isEqualTo(CLIENT_ID);
assertThat(registration.getClientIdIssuedAt()).isEqualTo(clientIdIssuedAt);
assertThat(registration.getClientSecret()).isEqualTo(CLIENT_SECRET);
assertThat(registration.getClientSecretExpiresAt()).isEqualTo(clientSecretExpiresAt);
assertThat(registration.getClientName()).isEqualTo("client-name");
assertThat(registration.getAuthorizationGrantTypes())
.isEqualTo(Collections.singleton(AuthorizationGrantType.AUTHORIZATION_CODE));
assertThat(registration.getClientAuthenticationMethods()).isEqualTo(CLIENT_AUTHENTICATION_METHODS);
assertThat(registration.getRedirectUris()).isEqualTo(REDIRECT_URIS);
assertThat(registration.getPostLogoutRedirectUris()).isEqualTo(POST_LOGOUT_REDIRECT_URIS);
assertThat(registration.getScopes()).isEqualTo(SCOPES);
}
@Test
public void buildWhenIdIsNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(null));
}
@Test
public void buildWhenClientIdIsNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(null)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenRedirectUrisNotProvidedThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenRedirectUrisConsumerClearsSetThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUri("https://example.com")
.redirectUris(Set::clear)
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenClientAuthenticationMethodNotProvidedThenDefaultToBasic() {
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getClientAuthenticationMethods())
.isEqualTo(Collections.singleton(ClientAuthenticationMethod.CLIENT_SECRET_BASIC));
}
@Test
public void buildWhenScopeIsEmptyThenScopeNotRequired() {
RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.build();
}
@Test
public void buildWhenScopeConsumerIsProvidedThenConsumerAccepted() {
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getScopes()).isEqualTo(SCOPES);
}
@Test
public void buildWhenScopeContainsSpaceThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scope("openid profile")
.build());
}
@Test
public void buildWhenScopeContainsInvalidCharacterThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scope("an\"invalid\"scope")
.build());
}
@Test
public void buildWhenRedirectUriInvalidThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUri("invalid URI")
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenRedirectUriContainsFragmentThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUri("https://example.com/page#fragment")
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenPostLogoutRedirectUriInvalidThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.postLogoutRedirectUri("invalid URI")
.build());
}
@Test
public void buildWhenPostLogoutRedirectUriContainsFragmentThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUri("https://example.com")
.postLogoutRedirectUri("https://example.com/index#fragment")
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenTwoAuthorizationGrantTypesAreProvidedThenBothAreRegistered() {
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.authorizationGrantType(AuthorizationGrantType.CLIENT_CREDENTIALS)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getAuthorizationGrantTypes()).containsExactlyInAnyOrder(
AuthorizationGrantType.AUTHORIZATION_CODE, AuthorizationGrantType.CLIENT_CREDENTIALS);
}
@Test
public void buildWhenAuthorizationGrantTypesConsumerIsProvidedThenConsumerAccepted() {
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantTypes((authorizationGrantTypes) -> {
authorizationGrantTypes.add(AuthorizationGrantType.AUTHORIZATION_CODE);
authorizationGrantTypes.add(AuthorizationGrantType.CLIENT_CREDENTIALS);
})
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getAuthorizationGrantTypes()).containsExactlyInAnyOrder(
AuthorizationGrantType.AUTHORIZATION_CODE, AuthorizationGrantType.CLIENT_CREDENTIALS);
}
@Test
public void buildWhenAuthorizationGrantTypesConsumerClearsSetThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.authorizationGrantTypes(Set::clear)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build());
}
@Test
public void buildWhenTwoClientAuthenticationMethodsAreProvidedThenBothAreRegistered() {
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_POST)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getClientAuthenticationMethods()).containsExactlyInAnyOrder(
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, ClientAuthenticationMethod.CLIENT_SECRET_POST);
}
@Test
public void buildWhenClientAuthenticationMethodsConsumerIsProvidedThenConsumerAccepted() {
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethods((clientAuthenticationMethods) -> {
clientAuthenticationMethods.add(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
clientAuthenticationMethods.add(ClientAuthenticationMethod.CLIENT_SECRET_POST);
})
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getClientAuthenticationMethods()).containsExactlyInAnyOrder(
ClientAuthenticationMethod.CLIENT_SECRET_BASIC, ClientAuthenticationMethod.CLIENT_SECRET_POST);
}
@Test
public void buildWhenOverrideIdThenOverridden() {
String overriddenId = "override";
RegisteredClient registration = RegisteredClient.withId(ID)
.id(overriddenId)
.clientId(CLIENT_ID)
.clientSecret(CLIENT_SECRET)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getId()).isEqualTo(overriddenId);
}
@Test
public void buildWhenRegisteredClientProvidedThenMakesACopy() {
RegisteredClient registration = TestRegisteredClients.registeredClient().build();
RegisteredClient updated = RegisteredClient.from(registration).build();
assertThat(registration.getId()).isEqualTo(updated.getId());
assertThat(registration.getClientId()).isEqualTo(updated.getClientId());
assertThat(registration.getClientIdIssuedAt()).isEqualTo(updated.getClientIdIssuedAt());
assertThat(registration.getClientSecret()).isEqualTo(updated.getClientSecret());
assertThat(registration.getClientSecretExpiresAt()).isEqualTo(updated.getClientSecretExpiresAt());
assertThat(registration.getClientName()).isEqualTo(updated.getClientName());
assertThat(registration.getClientAuthenticationMethods()).isEqualTo(updated.getClientAuthenticationMethods());
assertThat(registration.getClientAuthenticationMethods()).isNotSameAs(updated.getClientAuthenticationMethods());
assertThat(registration.getAuthorizationGrantTypes()).isEqualTo(updated.getAuthorizationGrantTypes());
assertThat(registration.getAuthorizationGrantTypes()).isNotSameAs(updated.getAuthorizationGrantTypes());
assertThat(registration.getRedirectUris()).isEqualTo(updated.getRedirectUris());
assertThat(registration.getRedirectUris()).isNotSameAs(updated.getRedirectUris());
assertThat(registration.getPostLogoutRedirectUris()).isEqualTo(updated.getPostLogoutRedirectUris());
assertThat(registration.getPostLogoutRedirectUris()).isNotSameAs(updated.getPostLogoutRedirectUris());
assertThat(registration.getScopes()).isEqualTo(updated.getScopes());
assertThat(registration.getScopes()).isNotSameAs(updated.getScopes());
assertThat(registration.getClientSettings()).isEqualTo(updated.getClientSettings());
assertThat(registration.getClientSettings()).isNotSameAs(updated.getClientSettings());
assertThat(registration.getTokenSettings()).isEqualTo(updated.getTokenSettings());
assertThat(registration.getTokenSettings()).isNotSameAs(updated.getTokenSettings());
}
@Test
public void buildWhenRegisteredClientValuesOverriddenThenPropagated() {
RegisteredClient registration = TestRegisteredClients.registeredClient().build();
String newName = "client-name";
String newSecret = "new-secret";
String newScope = "new-scope";
String newRedirectUri = "https://another-redirect-uri.com";
String newPostLogoutRedirectUri = "https://another-post-logout-redirect-uri.com";
RegisteredClient updated = RegisteredClient.from(registration)
.clientName(newName)
.clientSecret(newSecret)
.scopes((scopes) -> {
scopes.clear();
scopes.add(newScope);
})
.redirectUris((redirectUris) -> {
redirectUris.clear();
redirectUris.add(newRedirectUri);
})
.postLogoutRedirectUris((postLogoutRedirectUris) -> {
postLogoutRedirectUris.clear();
postLogoutRedirectUris.add(newPostLogoutRedirectUri);
})
.build();
assertThat(registration.getClientName()).isNotEqualTo(newName);
assertThat(updated.getClientName()).isEqualTo(newName);
assertThat(registration.getClientSecret()).isNotEqualTo(newSecret);
assertThat(updated.getClientSecret()).isEqualTo(newSecret);
assertThat(registration.getScopes()).doesNotContain(newScope);
assertThat(updated.getScopes()).containsExactly(newScope);
assertThat(registration.getRedirectUris()).doesNotContain(newRedirectUri);
assertThat(updated.getRedirectUris()).containsExactly(newRedirectUri);
assertThat(registration.getPostLogoutRedirectUris()).doesNotContain(newPostLogoutRedirectUri);
assertThat(updated.getPostLogoutRedirectUris()).containsExactly(newPostLogoutRedirectUri);
}
@Test
public void buildWhenPublicClientTypeThenDefaultSettings() {
Instant clientIdIssuedAt = Instant.now();
RegisteredClient registration = RegisteredClient.withId(ID)
.clientId(CLIENT_ID)
.clientIdIssuedAt(clientIdIssuedAt)
.clientName("client-name")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(ClientAuthenticationMethod.NONE)
.redirectUris((redirectUris) -> redirectUris.addAll(REDIRECT_URIS))
.scopes((scopes) -> scopes.addAll(SCOPES))
.build();
assertThat(registration.getId()).isEqualTo(ID);
assertThat(registration.getClientId()).isEqualTo(CLIENT_ID);
assertThat(registration.getClientIdIssuedAt()).isEqualTo(clientIdIssuedAt);
assertThat(registration.getClientName()).isEqualTo("client-name");
assertThat(registration.getAuthorizationGrantTypes())
.isEqualTo(Collections.singleton(AuthorizationGrantType.AUTHORIZATION_CODE));
assertThat(registration.getClientAuthenticationMethods())
.isEqualTo(Collections.singleton(ClientAuthenticationMethod.NONE));
assertThat(registration.getRedirectUris()).isEqualTo(REDIRECT_URIS);
assertThat(registration.getScopes()).isEqualTo(SCOPES);
assertThat(registration.getClientSettings().isRequireProofKey()).isTrue();
assertThat(registration.getClientSettings().isRequireAuthorizationConsent()).isTrue();
}
}
| RegisteredClientTests |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/autoconfigure/DispatcherServletAutoConfiguration.java | {
"start": 7446,
"end": 10131
} | class ____ extends SpringBootCondition {
@Override
public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
ConfigurableListableBeanFactory beanFactory = context.getBeanFactory();
Assert.state(beanFactory != null, "'beanFactory' must not be null");
ConditionOutcome outcome = checkDefaultDispatcherName(beanFactory);
if (!outcome.isMatch()) {
return outcome;
}
return checkServletRegistration(beanFactory);
}
private ConditionOutcome checkDefaultDispatcherName(ConfigurableListableBeanFactory beanFactory) {
boolean containsDispatcherBean = beanFactory.containsBean(DEFAULT_DISPATCHER_SERVLET_BEAN_NAME);
if (!containsDispatcherBean) {
return ConditionOutcome.match();
}
List<String> servlets = Arrays
.asList(beanFactory.getBeanNamesForType(DispatcherServlet.class, false, false));
if (!servlets.contains(DEFAULT_DISPATCHER_SERVLET_BEAN_NAME)) {
return ConditionOutcome.noMatch(
startMessage().found("non dispatcher servlet").items(DEFAULT_DISPATCHER_SERVLET_BEAN_NAME));
}
return ConditionOutcome.match();
}
private ConditionOutcome checkServletRegistration(ConfigurableListableBeanFactory beanFactory) {
ConditionMessage.Builder message = startMessage();
List<String> registrations = Arrays
.asList(beanFactory.getBeanNamesForType(ServletRegistrationBean.class, false, false));
boolean containsDispatcherRegistrationBean = beanFactory
.containsBean(DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME);
if (registrations.isEmpty()) {
if (containsDispatcherRegistrationBean) {
return ConditionOutcome.noMatch(message.found("non servlet registration bean")
.items(DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME));
}
return ConditionOutcome.match(message.didNotFind("servlet registration bean").atAll());
}
if (registrations.contains(DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME)) {
return ConditionOutcome.noMatch(message.found("servlet registration bean")
.items(DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME));
}
if (containsDispatcherRegistrationBean) {
return ConditionOutcome.noMatch(message.found("non servlet registration bean")
.items(DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME));
}
return ConditionOutcome.match(message.found("servlet registration beans")
.items(Style.QUOTE, registrations)
.append("and none is named " + DEFAULT_DISPATCHER_SERVLET_REGISTRATION_BEAN_NAME));
}
private ConditionMessage.Builder startMessage() {
return ConditionMessage.forCondition("DispatcherServlet Registration");
}
}
}
| DispatcherServletRegistrationCondition |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/HttpVersion.java | {
"start": 732,
"end": 1506
} | enum ____ {
/**
* {@code HTTP/1.0}.
*/
HTTP_1_0,
/**
* {@code HTTP/1.1}.
*/
HTTP_1_1,
/**
* {@code HTTP/2.0}.
*/
HTTP_2_0;
/**
* Return an {@link HttpVersion} for the given value.
* @param v The value
* @return The version
* @throws IllegalArgumentException If the given value is not a valid http version.
*/
public static HttpVersion valueOf(double v) {
if (v == 1.0d) {
return HttpVersion.HTTP_1_0;
} else if (v == 1.1d) {
return HttpVersion.HTTP_1_1;
} else if (v == 2.0) {
return HttpVersion.HTTP_2_0;
} else {
throw new IllegalArgumentException("Invalid HTTP version: " + v);
}
}
}
| HttpVersion |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FlinkEndpointBuilderFactory.java | {
"start": 21095,
"end": 21407
} | class ____ extends AbstractEndpointBuilder implements FlinkEndpointBuilder, AdvancedFlinkEndpointBuilder {
public FlinkEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new FlinkEndpointBuilderImpl(path);
}
} | FlinkEndpointBuilderImpl |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/TestingDispatcher.java | {
"start": 7096,
"end": 15988
} | class ____ {
private DispatcherId fencingToken = DispatcherId.generate();
private Collection<ExecutionPlan> recoveredJobs = Collections.emptyList();
@Nullable private Collection<JobResult> recoveredDirtyJobs = null;
private HighAvailabilityServices highAvailabilityServices =
new TestingHighAvailabilityServices();
private TestingResourceManagerGateway resourceManagerGateway =
new TestingResourceManagerGateway();
private GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever =
() -> CompletableFuture.completedFuture(resourceManagerGateway);
private HeartbeatServices heartbeatServices = new HeartbeatServicesImpl(1000L, 1000L);
private ExecutionPlanWriter executionPlanWriter = NoOpExecutionPlanWriter.INSTANCE;
private JobResultStore jobResultStore = new EmbeddedJobResultStore();
private Configuration configuration = new Configuration();
// even-though it's labeled as @Nullable, it's a mandatory field that needs to be set before
// building the Dispatcher instance
@Nullable private BlobServer blobServer = null;
private FatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler();
private JobManagerMetricGroup jobManagerMetricGroup =
UnregisteredMetricGroups.createUnregisteredJobManagerMetricGroup();
@Nullable private String metricServiceQueryAddress = null;
private Executor ioExecutor = ForkJoinPool.commonPool();
private HistoryServerArchivist historyServerArchivist = VoidHistoryServerArchivist.INSTANCE;
private ExecutionGraphInfoStore executionGraphInfoStore =
new MemoryExecutionGraphInfoStore();
private JobManagerRunnerFactory jobManagerRunnerFactory =
new TestingJobMasterServiceLeadershipRunnerFactory();
private CleanupRunnerFactory cleanupRunnerFactory = new TestingCleanupRunnerFactory();
private DispatcherBootstrapFactory dispatcherBootstrapFactory =
(dispatcher, scheduledExecutor, errorHandler) -> new NoOpDispatcherBootstrap();
private DispatcherOperationCaches dispatcherOperationCaches =
new DispatcherOperationCaches();
private JobManagerRunnerRegistry jobManagerRunnerRegistry =
new DefaultJobManagerRunnerRegistry(1);
@Nullable private ResourceCleanerFactory resourceCleanerFactory;
public Builder setFencingToken(DispatcherId fencingToken) {
this.fencingToken = fencingToken;
return this;
}
public Builder setRecoveredJobs(Collection<ExecutionPlan> recoveredJobs) {
this.recoveredJobs = recoveredJobs;
return this;
}
public Builder setRecoveredDirtyJobs(@Nullable Collection<JobResult> recoveredDirtyJobs) {
this.recoveredDirtyJobs = recoveredDirtyJobs;
return this;
}
public Builder setHighAvailabilityServices(
HighAvailabilityServices highAvailabilityServices) {
this.highAvailabilityServices = highAvailabilityServices;
return this;
}
public Builder setResourceManagerGateway(
TestingResourceManagerGateway resourceManagerGateway) {
this.resourceManagerGateway = resourceManagerGateway;
return this;
}
public Builder setResourceManagerGatewayRetriever(
GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever) {
this.resourceManagerGatewayRetriever = resourceManagerGatewayRetriever;
return this;
}
public Builder setHeartbeatServices(HeartbeatServices heartbeatServices) {
this.heartbeatServices = heartbeatServices;
return this;
}
public Builder setExecutionPlanWriter(ExecutionPlanWriter executionPlanWriter) {
this.executionPlanWriter = executionPlanWriter;
return this;
}
public Builder setJobResultStore(JobResultStore jobResultStore) {
this.jobResultStore = jobResultStore;
return this;
}
public Builder setConfiguration(Configuration configuration) {
this.configuration = configuration;
return this;
}
public Builder setBlobServer(BlobServer blobServer) {
this.blobServer = blobServer;
return this;
}
public Builder setFatalErrorHandler(FatalErrorHandler fatalErrorHandler) {
this.fatalErrorHandler = fatalErrorHandler;
return this;
}
public Builder setJobManagerMetricGroup(JobManagerMetricGroup jobManagerMetricGroup) {
this.jobManagerMetricGroup = jobManagerMetricGroup;
return this;
}
public Builder setMetricServiceQueryAddress(@Nullable String metricServiceQueryAddress) {
this.metricServiceQueryAddress = metricServiceQueryAddress;
return this;
}
public Builder setIoExecutor(Executor ioExecutor) {
this.ioExecutor = ioExecutor;
return this;
}
public Builder setHistoryServerArchivist(HistoryServerArchivist historyServerArchivist) {
this.historyServerArchivist = historyServerArchivist;
return this;
}
public Builder setExecutionGraphInfoStore(ExecutionGraphInfoStore executionGraphInfoStore) {
this.executionGraphInfoStore = executionGraphInfoStore;
return this;
}
public Builder setJobManagerRunnerFactory(JobManagerRunnerFactory jobManagerRunnerFactory) {
this.jobManagerRunnerFactory = jobManagerRunnerFactory;
return this;
}
public Builder setCleanupRunnerFactory(CleanupRunnerFactory cleanupRunnerFactory) {
this.cleanupRunnerFactory = cleanupRunnerFactory;
return this;
}
public Builder setDispatcherBootstrapFactory(
DispatcherBootstrapFactory dispatcherBootstrapFactory) {
this.dispatcherBootstrapFactory = dispatcherBootstrapFactory;
return this;
}
public Builder setDispatcherOperationCaches(
DispatcherOperationCaches dispatcherOperationCaches) {
this.dispatcherOperationCaches = dispatcherOperationCaches;
return this;
}
public Builder setJobManagerRunnerRegistry(
JobManagerRunnerRegistry jobManagerRunnerRegistry) {
this.jobManagerRunnerRegistry = jobManagerRunnerRegistry;
return this;
}
public Builder setResourceCleanerFactory(ResourceCleanerFactory resourceCleanerFactory) {
this.resourceCleanerFactory = resourceCleanerFactory;
return this;
}
private ResourceCleanerFactory createDefaultResourceCleanerFactory() {
return new DispatcherResourceCleanerFactory(
ioExecutor,
TestingRetryStrategies.NO_RETRY_STRATEGY,
jobManagerRunnerRegistry,
executionPlanWriter,
blobServer,
highAvailabilityServices,
jobManagerMetricGroup);
}
public TestingDispatcher build(RpcService rpcService) throws Exception {
return new TestingDispatcher(
rpcService,
fencingToken,
recoveredJobs,
recoveredDirtyJobs == null
? jobResultStore.getDirtyResults()
: recoveredDirtyJobs,
configuration,
highAvailabilityServices,
resourceManagerGatewayRetriever,
heartbeatServices,
Preconditions.checkNotNull(
blobServer,
"No BlobServer is specified for building the TestingDispatcher"),
fatalErrorHandler,
executionPlanWriter,
jobResultStore,
jobManagerMetricGroup,
metricServiceQueryAddress,
ioExecutor,
historyServerArchivist,
executionGraphInfoStore,
jobManagerRunnerFactory,
cleanupRunnerFactory,
dispatcherBootstrapFactory,
dispatcherOperationCaches,
jobManagerRunnerRegistry,
resourceCleanerFactory != null
? resourceCleanerFactory
: createDefaultResourceCleanerFactory());
}
}
}
| Builder |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/type/ArgumentValue.java | {
"start": 761,
"end": 1285
} | interface ____<V> extends Argument<V> {
/**
* @return The current value of the argument
*/
V getValue();
/**
* Create a new {@link ArgumentValue} for the given {@link Argument} and value.
*
* @param argument The argument
* @param value The value
* @param <T> The value type
* @return The created instance
*/
static <T> ArgumentValue<T> create(Argument<T> argument, T value) {
return new DefaultArgumentValue<>(argument, value);
}
}
| ArgumentValue |
java | google__dagger | javatests/dagger/internal/codegen/SwitchingProviderTest.java | {
"start": 1039,
"end": 2400
} | class ____ {
@Parameters(name = "{0}")
public static ImmutableList<Object[]> parameters() {
return CompilerMode.TEST_PARAMETERS;
}
@Rule public GoldenFileRule goldenFileRule = new GoldenFileRule();
private final CompilerMode compilerMode;
public SwitchingProviderTest(CompilerMode compilerMode) {
this.compilerMode = compilerMode;
}
@Test
public void switchingProviderTest() throws Exception {
ImmutableList.Builder<Source> sources = ImmutableList.builder();
StringBuilder entryPoints = new StringBuilder();
for (int i = 0; i <= 100; i++) {
String bindingName = "Binding" + i;
sources.add(
CompilerTests.javaSource(
"test." + bindingName,
"package test;",
"",
"import javax.inject.Inject;",
"",
"final class " + bindingName + " {",
" @Inject",
" " + bindingName + "() {}",
"}"));
entryPoints.append(String.format(" Provider<%1$s> get%1$sProvider();\n", bindingName));
}
sources.add(
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"import javax.inject.Provider;",
"",
"@Component",
" | SwitchingProviderTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/SlashTest.java | {
"start": 172,
"end": 540
} | class ____ extends TestCase {
public void test_0 () throws Exception {
String text = "{\"errorMessage\":\"resource '/rpc/hello/none.json' is not found !\"}";
JSONObject json = (JSONObject) JSON.parse(text);
Assert.assertEquals("{\"errorMessage\":\"resource '/rpc/hello/none.json' is not found !\"}", json.toString());
}
}
| SlashTest |
java | google__jimfs | jimfs/src/main/java/com/google/common/jimfs/UserDefinedAttributeProvider.java | {
"start": 1380,
"end": 3444
} | class ____ extends AttributeProvider {
UserDefinedAttributeProvider() {}
@Override
public String name() {
return "user";
}
@Override
public ImmutableSet<String> fixedAttributes() {
// no fixed set of attributes for this view
return ImmutableSet.of();
}
@Override
public boolean supports(String attribute) {
// any attribute name is supported
return true;
}
@Override
public ImmutableSet<String> attributes(File file) {
return userDefinedAttributes(file);
}
private static ImmutableSet<String> userDefinedAttributes(File file) {
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
for (String attribute : file.getAttributeNames("user")) {
builder.add(attribute);
}
return builder.build();
}
@Override
public @Nullable Object get(File file, String attribute) {
Object value = file.getAttribute("user", attribute);
if (value instanceof byte[]) {
byte[] bytes = (byte[]) value;
return bytes.clone();
}
return null;
}
@Override
public void set(File file, String view, String attribute, Object value, boolean create) {
checkNotNull(value);
checkNotCreate(view, attribute, create);
byte[] bytes;
if (value instanceof byte[]) {
bytes = ((byte[]) value).clone();
} else if (value instanceof ByteBuffer) {
// value instanceof ByteBuffer
ByteBuffer buffer = (ByteBuffer) value;
bytes = new byte[buffer.remaining()];
buffer.get(bytes);
} else {
throw invalidType(view, attribute, value, byte[].class, ByteBuffer.class);
}
file.setAttribute("user", attribute, bytes);
}
@Override
public Class<UserDefinedFileAttributeView> viewType() {
return UserDefinedFileAttributeView.class;
}
@Override
public UserDefinedFileAttributeView view(
FileLookup lookup, ImmutableMap<String, FileAttributeView> inheritedViews) {
return new View(lookup);
}
/** Implementation of {@link UserDefinedFileAttributeView}. */
private static | UserDefinedAttributeProvider |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/SamplingMethodTests.java | {
"start": 451,
"end": 1639
} | class ____ extends ESTestCase {
private static int NUM_TEST_RUNS = 10;
abstract SamplingMethod createInstance();
abstract boolean isDescending();
public void testMonotonic() {
SamplingMethod method = createInstance();
double[] cdfs = method.cdfPoints();
double lft = cdfs[0];
for (int i = 1; i < cdfs.length; i++) {
assertThat(
"failed monotonic test [" + (isDescending() ? "desc" : "asc") + "] at point [" + i + "]",
lft,
isDescending() ? greaterThan(cdfs[i]) : lessThan(cdfs[i])
);
lft = cdfs[i];
}
}
public void testAllPositive() {
SamplingMethod method = createInstance();
double[] cdfs = method.cdfPoints();
for (int i = 0; i < cdfs.length; i++) {
assertThat(cdfs[i], greaterThan(0.0));
}
}
public void testConsistent() {
for (int j = 0; j < NUM_TEST_RUNS; j++) {
SamplingMethod lft = createInstance();
SamplingMethod rgt = createInstance();
assertArrayEquals(lft.cdfPoints(), rgt.cdfPoints(), 0.0);
}
}
}
| SamplingMethodTests |
java | junit-team__junit5 | documentation/src/test/java/example/extensions/ParameterResolverCustomTypeDemo.java | {
"start": 948,
"end": 1352
} | class ____ implements ParameterResolver {
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
return parameterContext.getParameter().getType().equals(Integer.class);
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
return 1;
}
}
static | FirstIntegerResolver |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java | {
"start": 6311,
"end": 6545
} | class ____ parent: " + name + " ");
}
}
if (c == null) {
throw ex != null ? ex : new ClassNotFoundException(name);
}
if (resolve) {
resolveClass(c);
}
return c;
}
/**
* Checks if a | from |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/converters/RuntimeParamConverterTest.java | {
"start": 850,
"end": 2187
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(ParamConverterEndpoint.class, OptionalIntegerParamConverterProvider.class,
OptionalIntegerParamConverter.class);
}
});
@Test
void sendParameters() {
given().queryParam("number", 22)
.when().get("/param-converter")
.then()
.statusCode(200)
.body(Matchers.is("Hello, 22!"));
}
@Test
void doNotSendParameters() {
given().when().get("/param-converter")
.then()
.statusCode(200)
.body(Matchers.is("Hello, world! No number was provided."));
}
@Test
void sendEmptyParameter() {
given().queryParam("number", "")
.when().get("/param-converter")
.then()
.statusCode(200)
.body(Matchers.is("Hello, world! No number was provided."));
}
@ApplicationScoped
@Path("/param-converter")
public static | RuntimeParamConverterTest |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/ApplicationServiceRecordProcessor.java | {
"start": 9262,
"end": 10330
} | class ____
extends ApplicationRecordDescriptor<InetAddress> {
/**
* Creates an application A record descriptor.
*
* @param path registry path for service record
* @param record service record
* @throws Exception
*/
public AApplicationRecordDescriptor(String path,
ServiceRecord record) throws Exception {
super(record);
}
/**
* Initializes the descriptor parameters.
*
* @param serviceRecord the service record.
*/
@Override protected void init(ServiceRecord serviceRecord)
throws Exception {
this.setNames(new Name[] {getServiceName()});
List<Endpoint> endpoints = serviceRecord.external;
if (endpoints.isEmpty()) {
return;
}
// TODO: do we need a "hostname" attribute for an application record or
// can we rely on the first endpoint record.
this.setTarget(InetAddress.getByName(
getHost(endpoints.get(0))));
}
}
/**
* An application AAAA record descriptor.
*/
| AApplicationRecordDescriptor |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleDematerialize.java | {
"start": 1238,
"end": 1731
} | class ____<T, R> extends Maybe<R> {
final Single<T> source;
final Function<? super T, Notification<R>> selector;
public SingleDematerialize(Single<T> source, Function<? super T, Notification<R>> selector) {
this.source = source;
this.selector = selector;
}
@Override
protected void subscribeActual(MaybeObserver<? super R> observer) {
source.subscribe(new DematerializeObserver<>(observer, selector));
}
static final | SingleDematerialize |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ImportAwareTests.java | {
"start": 8024,
"end": 8434
} | class ____ implements BeanPostProcessor, BeanFactoryAware {
@Override
public void setBeanFactory(BeanFactory beanFactory) {
}
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
return bean;
}
}
@Configuration
@EnableImportRegistrar
static | BPP |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Attachment.java | {
"start": 8908,
"end": 9332
} | class ____ extends XContent {
public Json(String id, ToXContent content) {
super(id, content, XContentType.JSON);
}
public Json(String id, String name, ToXContent content) {
super(id, name, content, XContentType.JSON);
}
@Override
public String type() {
return "json";
}
}
}
}
| Json |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/net/protocol/jar/JarUrlClassLoader.java | {
"start": 1290,
"end": 1769
} | class ____ extends URLClassLoader {
static {
ClassLoader.registerAsParallelCapable();
}
private final URL[] urls;
private final boolean hasJarUrls;
private final Map<URL, JarFile> jarFiles = new ConcurrentHashMap<>();
private final Set<String> undefinablePackages = ConcurrentHashMap.newKeySet();
/**
* Create a new {@link LaunchedClassLoader} instance.
* @param urls the URLs from which to load classes and resources
* @param parent the parent | JarUrlClassLoader |
java | dropwizard__dropwizard | dropwizard-lifecycle/src/main/java/io/dropwizard/lifecycle/Managed.java | {
"start": 44,
"end": 150
} | interface ____ objects which need to take some action as the application is started or stopped.
*/
public | for |
java | google__guava | android/guava-tests/test/com/google/common/graph/InvalidatableSetTest.java | {
"start": 442,
"end": 2264
} | class ____ {
Set<Integer> wrappedSet;
Set<Integer> copyOfWrappedSet;
InvalidatableSet<Integer> setToTest;
@Before
public void createSets() {
wrappedSet = new HashSet<>();
wrappedSet.add(1);
wrappedSet.add(2);
wrappedSet.add(3);
copyOfWrappedSet = ImmutableSet.copyOf(wrappedSet);
setToTest =
InvalidatableSet.of(wrappedSet, () -> wrappedSet.contains(1), () -> 1 + "is not present");
}
@Test
@SuppressWarnings("TruthSelfEquals")
public void testEquals() {
// sanity check on construction of copyOfWrappedSet
assertThat(wrappedSet).isEqualTo(copyOfWrappedSet);
// test that setToTest is still valid
assertThat(setToTest).isEqualTo(wrappedSet);
assertThat(setToTest).isEqualTo(copyOfWrappedSet);
// invalidate setToTest
wrappedSet.remove(1);
// sanity check on update of wrappedSet
assertThat(wrappedSet).isNotEqualTo(copyOfWrappedSet);
ImmutableSet<Integer> copyOfModifiedSet = ImmutableSet.copyOf(wrappedSet); // {2,3}
// sanity check on construction of copyOfModifiedSet
assertThat(wrappedSet).isEqualTo(copyOfModifiedSet);
// setToTest should throw when it calls equals(), or equals is called on it, except for itself
assertThat(setToTest).isEqualTo(setToTest);
assertThrows(IllegalStateException.class, () -> setToTest.equals(wrappedSet));
assertThrows(IllegalStateException.class, () -> setToTest.equals(copyOfWrappedSet));
assertThrows(IllegalStateException.class, () -> setToTest.equals(copyOfModifiedSet));
assertThrows(IllegalStateException.class, () -> wrappedSet.equals(setToTest));
assertThrows(IllegalStateException.class, () -> copyOfWrappedSet.equals(setToTest));
assertThrows(IllegalStateException.class, () -> copyOfModifiedSet.equals(setToTest));
}
}
| InvalidatableSetTest |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/server/ExchangeMatcherRedirectWebFilter.java | {
"start": 1291,
"end": 2419
} | class ____ implements WebFilter {
private final ServerRedirectStrategy redirectStrategy = new DefaultServerRedirectStrategy();
private final ServerWebExchangeMatcher exchangeMatcher;
private final URI redirectUri;
/**
* Create and initialize an instance of the web filter.
* @param exchangeMatcher the exchange matcher
* @param redirectUrl the redirect URL
*/
public ExchangeMatcherRedirectWebFilter(ServerWebExchangeMatcher exchangeMatcher, String redirectUrl) {
Assert.notNull(exchangeMatcher, "exchangeMatcher cannot be null");
Assert.hasText(redirectUrl, "redirectUrl cannot be empty");
this.exchangeMatcher = exchangeMatcher;
this.redirectUri = URI.create(redirectUrl);
}
/**
* {@inheritDoc}
*/
@Override
public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
// @formatter:off
return this.exchangeMatcher.matches(exchange)
.filter(MatchResult::isMatch)
.switchIfEmpty(chain.filter(exchange).then(Mono.empty()))
.flatMap((result) -> this.redirectStrategy.sendRedirect(exchange, this.redirectUri));
// @formatter:on
}
}
| ExchangeMatcherRedirectWebFilter |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/deftyping/DefaultTypeAbstractMapping3235Test.java | {
"start": 789,
"end": 841
} | class ____ extends Parent { }
static abstract | Child |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/ComplexWebApplicationContext.java | {
"start": 9127,
"end": 9290
} | interface ____ {
void doSomething(HttpServletRequest request) throws ServletException, IllegalAccessException;
long lastModified();
}
public static | MyHandler |
java | quarkusio__quarkus | extensions/security-webauthn/runtime/src/main/java/io/quarkus/security/webauthn/WebAuthnSecurity.java | {
"start": 4219,
"end": 35796
} | class ____ {
/*
* Android Keystore Root is not published anywhere.
* This certificate was extracted from one of the attestations
* The last certificate in x5c must match this certificate
* This needs to be checked to ensure that malicious party won't generate fake attestations
*/
private static final String ANDROID_KEYSTORE_ROOT = "MIICizCCAjKgAwIBAgIJAKIFntEOQ1tXMAoGCCqGSM49BAMCMIGYMQswCQYDVQQG" +
"EwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmll" +
"dzEVMBMGA1UECgwMR29vZ2xlLCBJbmMuMRAwDgYDVQQLDAdBbmRyb2lkMTMwMQYD" +
"VQQDDCpBbmRyb2lkIEtleXN0b3JlIFNvZnR3YXJlIEF0dGVzdGF0aW9uIFJvb3Qw" +
"HhcNMTYwMTExMDA0MzUwWhcNMzYwMTA2MDA0MzUwWjCBmDELMAkGA1UEBhMCVVMx" +
"EzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDU1vdW50YWluIFZpZXcxFTAT" +
"BgNVBAoMDEdvb2dsZSwgSW5jLjEQMA4GA1UECwwHQW5kcm9pZDEzMDEGA1UEAwwq" +
"QW5kcm9pZCBLZXlzdG9yZSBTb2Z0d2FyZSBBdHRlc3RhdGlvbiBSb290MFkwEwYH" +
"KoZIzj0CAQYIKoZIzj0DAQcDQgAE7l1ex+HA220Dpn7mthvsTWpdamguD/9/SQ59" +
"dx9EIm29sa/6FsvHrcV30lacqrewLVQBXT5DKyqO107sSHVBpKNjMGEwHQYDVR0O" +
"BBYEFMit6XdMRcOjzw0WEOR5QzohWjDPMB8GA1UdIwQYMBaAFMit6XdMRcOjzw0W" +
"EOR5QzohWjDPMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgKEMAoGCCqG" +
"SM49BAMCA0cAMEQCIDUho++LNEYenNVg8x1YiSBq3KNlQfYNns6KGYxmSGB7AiBN" +
"C/NR2TB8fVvaNTQdqEcbY6WFZTytTySn502vQX3xvw==";
// https://aboutssl.org/globalsign-root-certificates-licensing-and-use/
// Name gsr1
// Thumbprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
// Valid Until 28 January 2028
private static final String GSR1 = "MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG\n" +
"A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv\n" +
"b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw\n" +
"MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i\n" +
"YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT\n" +
"aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ\n" +
"jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp\n" +
"xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp\n" +
"1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG\n" +
"snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ\n" +
"U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8\n" +
"9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E\n" +
"BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B\n" +
"AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz\n" +
"yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE\n" +
"38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP\n" +
"AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad\n" +
"DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME\n" +
"HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==";
/**
* Apple WebAuthn Root CA PEM
* <p>
* Downloaded from https://www.apple.com/certificateauthority/Apple_WebAuthn_Root_CA.pem
* <p>
* Valid until 03/14/2045 @ 5:00 PM PST
*/
private static final String APPLE_WEBAUTHN_ROOT_CA = "MIICEjCCAZmgAwIBAgIQaB0BbHo84wIlpQGUKEdXcTAKBggqhkjOPQQDAzBLMR8w" +
"HQYDVQQDDBZBcHBsZSBXZWJBdXRobiBSb290IENBMRMwEQYDVQQKDApBcHBsZSBJ" +
"bmMuMRMwEQYDVQQIDApDYWxpZm9ybmlhMB4XDTIwMDMxODE4MjEzMloXDTQ1MDMx" +
"NTAwMDAwMFowSzEfMB0GA1UEAwwWQXBwbGUgV2ViQXV0aG4gUm9vdCBDQTETMBEG" +
"A1UECgwKQXBwbGUgSW5jLjETMBEGA1UECAwKQ2FsaWZvcm5pYTB2MBAGByqGSM49" +
"AgEGBSuBBAAiA2IABCJCQ2pTVhzjl4Wo6IhHtMSAzO2cv+H9DQKev3//fG59G11k" +
"xu9eI0/7o6V5uShBpe1u6l6mS19S1FEh6yGljnZAJ+2GNP1mi/YK2kSXIuTHjxA/" +
"pcoRf7XkOtO4o1qlcaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUJtdk" +
"2cV4wlpn0afeaxLQG2PxxtcwDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA" +
"MGQCMFrZ+9DsJ1PW9hfNdBywZDsWDbWFp28it1d/5w2RPkRX3Bbn/UbDTNLx7Jr3" +
"jAGGiQIwHFj+dJZYUJR786osByBelJYsVZd2GbHQu209b5RCmGQ21gpSAk9QZW4B" +
"1bWeT0vT";
/**
* Default FIDO2 MDS3 ROOT Certificate
* <p>
* Downloaded from https://valid.r3.roots.globalsign.com/
* <p>
* Valid until 18 March 2029
*/
private static final String FIDO_MDS3_ROOT_CERTIFICATE = "MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G"
+
"A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp" +
"Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4" +
"MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG" +
"A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI" +
"hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8" +
"RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT" +
"gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm" +
"KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd" +
"QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ" +
"XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw" +
"DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o" +
"LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU" +
"RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp" +
"jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK" +
"6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX" +
"mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs" +
"Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH" +
"WD9f";
@Inject
TlsConfigurationRegistry certificates;
@Inject
WebAuthnAuthenticationMechanism authMech;
@Inject
WebAuthnAuthenticatorStorage storage;
private ObjectConverter objectConverter = new ObjectConverter();
private WebAuthnAsyncManager webAuthn;
private VertxContextPRNG random;
private String challengeCookie;
private List<String> origins;
private String rpId;
private String rpName;
private UserVerification userVerification;
private Boolean userPresenceRequired;
private List<PublicKeyCredentialParameters> pubKeyCredParams;
private ResidentKey residentKey;
private Duration timeout;
private int challengeLength;
private AuthenticatorAttachment authenticatorAttachment;
private Attestation attestation;
public WebAuthnSecurity(WebAuthnRunTimeConfig config, Vertx vertx, WebAuthnAuthenticatorStorage database) {
// apply config defaults
this.rpId = config.relyingParty().id().orElse(null);
this.rpName = config.relyingParty().name();
this.origins = config.origins().orElse(Collections.emptyList());
this.challengeCookie = config.challengeCookieName();
this.challengeLength = config.challengeLength().orElse(64);
this.userPresenceRequired = config.userPresenceRequired().orElse(true);
this.timeout = config.timeout().orElse(Duration.ofMinutes(5));
if (config.publicKeyCredentialParameters().isPresent()) {
this.pubKeyCredParams = new ArrayList<>(config.publicKeyCredentialParameters().get().size());
for (COSEAlgorithm publicKeyCredential : config.publicKeyCredentialParameters().get()) {
this.pubKeyCredParams.add(new PublicKeyCredentialParameters(PublicKeyCredentialType.PUBLIC_KEY,
COSEAlgorithmIdentifier.create(publicKeyCredential.coseId())));
}
} else {
this.pubKeyCredParams = new ArrayList<>(2);
this.pubKeyCredParams
.add(new PublicKeyCredentialParameters(PublicKeyCredentialType.PUBLIC_KEY, COSEAlgorithmIdentifier.ES256));
this.pubKeyCredParams
.add(new PublicKeyCredentialParameters(PublicKeyCredentialType.PUBLIC_KEY, COSEAlgorithmIdentifier.RS256));
}
this.authenticatorAttachment = config.authenticatorAttachment().orElse(null);
this.userVerification = config.userVerification().orElse(UserVerification.REQUIRED);
this.residentKey = config.residentKey().orElse(ResidentKey.REQUIRED);
this.attestation = config.attestation().orElse(Attestation.NONE);
// create the webauthn4j manager
this.webAuthn = makeWebAuthn(vertx, config);
this.random = VertxContextPRNG.current(vertx);
}
private String randomBase64URLBuffer() {
final byte[] buff = new byte[challengeLength];
random.nextBytes(buff);
return Base64UrlUtil.encodeToString(buff);
}
private WebAuthnAsyncManager makeWebAuthn(Vertx vertx, WebAuthnRunTimeConfig config) {
if (config.attestation().isPresent()
&& config.attestation().get() != WebAuthnRunTimeConfig.Attestation.NONE) {
TrustAnchorAsyncRepository something;
// FIXME: make config name configurable?
Optional<TlsConfiguration> webauthnTlsConfiguration = certificates.get("webauthn");
KeyStore trustStore;
if (webauthnTlsConfiguration.isPresent()) {
trustStore = webauthnTlsConfiguration.get().getTrustStore();
} else {
try {
trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
trustStore.load(null, null);
addCert(trustStore, ANDROID_KEYSTORE_ROOT);
addCert(trustStore, APPLE_WEBAUTHN_ROOT_CA);
addCert(trustStore, FIDO_MDS3_ROOT_CERTIFICATE);
addCert(trustStore, GSR1);
} catch (CertificateException | KeyStoreException | NoSuchAlgorithmException | IOException e) {
throw new RuntimeException("Failed to configure default WebAuthn certificates", e);
}
}
Set<TrustAnchor> trustAnchors = new HashSet<>();
try {
Enumeration<String> aliases = trustStore.aliases();
while (aliases.hasMoreElements()) {
trustAnchors.add(new TrustAnchor((X509Certificate) trustStore.getCertificate(aliases.nextElement()), null));
}
} catch (KeyStoreException e) {
throw new RuntimeException("Failed to configure WebAuthn trust store", e);
}
// FIXME CLRs are not supported yet
something = new KeyStoreTrustAnchorAsyncRepository(trustStore);
if (config.loadMetadata().orElse(false)) {
HttpAsyncClient httpClient = new VertxHttpAsyncClient(vertx);
FidoMDS3MetadataBLOBAsyncProvider blobAsyncProvider = new FidoMDS3MetadataBLOBAsyncProvider(objectConverter,
FidoMDS3MetadataBLOBAsyncProvider.DEFAULT_BLOB_ENDPOINT, httpClient, trustAnchors);
something = new MetadataBLOBBasedTrustAnchorAsyncRepository(blobAsyncProvider);
}
return new WebAuthnAsyncManager(
Arrays.asList(
new FIDOU2FAttestationStatementAsyncVerifier(),
new PackedAttestationStatementAsyncVerifier(),
new TPMAttestationStatementAsyncVerifier(),
new AndroidKeyAttestationStatementAsyncVerifier(),
new AndroidSafetyNetAttestationStatementAsyncVerifier(),
new AppleAnonymousAttestationStatementAsyncVerifier()),
new DefaultCertPathTrustworthinessAsyncVerifier(something),
new DefaultSelfAttestationTrustworthinessAsyncVerifier(),
objectConverter);
} else {
return WebAuthnAsyncManager.createNonStrictWebAuthnAsyncManager(objectConverter);
}
}
private void addCert(KeyStore keyStore, String pemCertificate) throws CertificateException, KeyStoreException {
X509Certificate cert = JWS.parseX5c(pemCertificate);
CertInfo info = CertificateHelper.getCertInfo(cert);
keyStore.setCertificateEntry(info.subject("CN"), cert);
}
private static byte[] uUIDBytes(UUID uuid) {
Buffer buffer = Buffer.buffer(16);
buffer.setLong(0, uuid.getMostSignificantBits());
buffer.setLong(8, uuid.getLeastSignificantBits());
return buffer.getBytes();
}
/**
* Obtains a registration challenge for the given required username and displayName. This will also
* create and save a challenge in a session cookie.
*
* @param username the username for the registration
* @param displayName the displayName for the registration
* @param ctx the Vert.x context
* @return the registration challenge.
*/
@SuppressWarnings("unused")
public Uni<PublicKeyCredentialCreationOptions> getRegisterChallenge(String username, String displayName,
RoutingContext ctx) {
if (username == null || username.isEmpty()) {
return Uni.createFrom().failure(new IllegalArgumentException("Username is required"));
}
// default displayName to username, but it's required really
if (displayName == null || displayName.isEmpty()) {
displayName = username;
}
String finalDisplayName = displayName;
String challenge = getOrCreateChallenge(ctx);
Origin origin = Origin.create(!this.origins.isEmpty() ? this.origins.get(0) : ctx.request().absoluteURI());
String rpId = this.rpId != null ? this.rpId : origin.getHost();
return storage.findByUsername(username)
.map(credentials -> {
List<PublicKeyCredentialDescriptor> excluded;
// See https://github.com/quarkusio/quarkus/issues/44292 for why this is currently disabled
if (false) {
excluded = new ArrayList<>(credentials.size());
for (WebAuthnCredentialRecord credential : credentials) {
excluded.add(new PublicKeyCredentialDescriptor(PublicKeyCredentialType.PUBLIC_KEY,
credential.getAttestedCredentialData().getCredentialId(),
credential.getTransports()));
}
} else {
excluded = Collections.emptyList();
}
PublicKeyCredentialCreationOptions publicKeyCredentialCreationOptions = new PublicKeyCredentialCreationOptions(
new PublicKeyCredentialRpEntity(
rpId,
rpName),
new PublicKeyCredentialUserEntity(
uUIDBytes(UUID.randomUUID()),
username,
finalDisplayName),
new DefaultChallenge(challenge),
pubKeyCredParams,
timeout.getSeconds() * 1000,
excluded,
new AuthenticatorSelectionCriteria(
authenticatorAttachment != null ? authenticatorAttachment.toWebAuthn4J() : null,
residentKey == ResidentKey.REQUIRED,
residentKey.toWebAuthn4J(),
userVerification.toWebAuthn4J()),
attestation.toWebAuthn4J(),
new AuthenticationExtensionsClientInputs<>());
// save challenge to the session
authMech.getLoginManager().save(challenge, ctx, challengeCookie, null,
ctx.request().isSSL());
return publicKeyCredentialCreationOptions;
});
}
/**
* Obtains a login challenge for the given optional username. This will also
* create and save a challenge in a session cookie.
*
* @param username the optional username for the login
* @param ctx the Vert.x context
* @return the login challenge.
*/
@SuppressWarnings("unused")
public Uni<PublicKeyCredentialRequestOptions> getLoginChallenge(String username, RoutingContext ctx) {
// Username is not required with passkeys
if (username == null) {
username = "";
}
String finalUsername = username;
String challenge = getOrCreateChallenge(ctx);
Origin origin = Origin.create(!this.origins.isEmpty() ? this.origins.get(0) : ctx.request().absoluteURI());
String rpId = this.rpId != null ? this.rpId : origin.getHost();
// do not attempt to look users up if there's no user name
Uni<List<WebAuthnCredentialRecord>> credentialsUni;
if (username.isEmpty()) {
credentialsUni = Uni.createFrom().item(Collections.emptyList());
} else {
credentialsUni = storage.findByUsername(username);
}
return credentialsUni
.map(credentials -> {
List<PublicKeyCredentialDescriptor> allowedCredentials;
// See https://github.com/quarkusio/quarkus/issues/44292 for why this is currently disabled
if (false) {
if (credentials.isEmpty()) {
throw new RuntimeException("No credentials found for " + finalUsername);
}
allowedCredentials = new ArrayList<>(credentials.size());
for (WebAuthnCredentialRecord credential : credentials) {
allowedCredentials.add(new PublicKeyCredentialDescriptor(PublicKeyCredentialType.PUBLIC_KEY,
credential.getAttestedCredentialData().getCredentialId(),
credential.getTransports()));
}
} else {
allowedCredentials = Collections.emptyList();
}
PublicKeyCredentialRequestOptions publicKeyCredentialRequestOptions = new PublicKeyCredentialRequestOptions(
new DefaultChallenge(challenge),
timeout.getSeconds() * 1000,
rpId,
allowedCredentials,
userVerification.toWebAuthn4J(),
null);
// save challenge to the session
authMech.getLoginManager().save(challenge, ctx, challengeCookie, null,
ctx.request().isSSL());
return publicKeyCredentialRequestOptions;
});
}
private String getOrCreateChallenge(RoutingContext ctx) {
RestoreResult challengeRestoreResult = authMech.getLoginManager().restore(ctx, challengeCookie);
String challenge;
if (challengeRestoreResult == null || challengeRestoreResult.getPrincipal() == null
|| challengeRestoreResult.getPrincipal().isEmpty()) {
challenge = randomBase64URLBuffer();
} else {
challenge = challengeRestoreResult.getPrincipal();
}
return challenge;
}
/**
* Registers a new WebAuthn credentials. This will check it, clear the challenge cookie and return it in case of
* success, but not invoke {@link WebAuthnUserProvider#store(WebAuthnCredentialRecord)}, you have to do
* it manually in case of success. This will also not set a login cookie, you have to do it manually using
* {@link #rememberUser(String, RoutingContext)}
* or using any other way.
*
* @param the username to register credentials for
* @param response the Webauthn registration info
* @param ctx the current request
* @return the newly created credentials
*/
public Uni<WebAuthnCredentialRecord> register(String username, WebAuthnRegisterResponse response, RoutingContext ctx) {
return register(username, response.toJsonObject(), ctx);
}
    /**
     * Registers a new WebAuthn credentials. This will check it, clear the challenge cookie and return it in case of
     * success, but not invoke {@link WebAuthnUserProvider#store(WebAuthnCredentialRecord)}, you have to do
     * it manually in case of success. This will also not set a login cookie, you have to do it manually using
     * {@link #rememberUser(String, RoutingContext)}
     * or using any other way.
     *
     * @param username the username to register credentials for
     * @param response the Webauthn registration info
     * @param ctx the current request
     * @return the newly created credentials
     */
    public Uni<WebAuthnCredentialRecord> register(String username, JsonObject response, RoutingContext ctx) {
        // the challenge must have been issued earlier and stored in the challenge cookie
        RestoreResult challenge = authMech.getLoginManager().restore(ctx, challengeCookie);
        if (challenge == null || challenge.getPrincipal() == null || challenge.getPrincipal().isEmpty()) {
            return Uni.createFrom().failure(new RuntimeException("Missing challenge"));
        }
        if (username == null || username.isEmpty()) {
            return Uni.createFrom().failure(new RuntimeException("Missing username"));
        }
        // input validation
        if (response == null ||
                !containsRequiredString(response, "id") ||
                !containsRequiredString(response, "rawId") ||
                !containsRequiredObject(response, "response") ||
                !containsOptionalString(response.getJsonObject("response"), "userHandle") ||
                !containsRequiredString(response, "type") ||
                !"public-key".equals(response.getString("type"))) {
            return Uni.createFrom().failure(new IllegalArgumentException(
                    "Response missing one or more of id/rawId/response[.userHandle]/type fields, or type is not public-key"));
        }
        // payload has been validated above; hand it to WebAuthn4J verbatim
        String registrationResponseJSON = response.encode();
        ServerProperty serverProperty = makeServerProperty(challenge, ctx);
        RegistrationParameters registrationParameters = new RegistrationParameters(serverProperty, pubKeyCredParams,
                userVerification == UserVerification.REQUIRED, userPresenceRequired);
        return Uni.createFrom()
                .completionStage(webAuthn.verifyRegistrationResponseJSON(registrationResponseJSON, registrationParameters))
                // the challenge cookie is single-use: drop it whether or not verification succeeded
                .eventually(() -> {
                    removeCookie(ctx, challengeCookie);
                }).map(registrationData -> new WebAuthnCredentialRecord(
                        username,
                        registrationData.getAttestationObject(),
                        registrationData.getCollectedClientData(),
                        registrationData.getClientExtensions(),
                        registrationData.getTransports()));
    }
    /**
     * Builds the WebAuthn4J {@code ServerProperty} used for verification: the set of allowed origins
     * (falling back to the current request's absolute URI when none are configured), the relying party id
     * (explicitly configured, or derived from the first origin's host) and the challenge restored from the cookie.
     *
     * @param challenge the challenge restored from the challenge cookie
     * @param ctx the current request, used to derive a default origin
     * @return the server property to verify registrations/logins against
     */
    private ServerProperty makeServerProperty(RestoreResult challenge, RoutingContext ctx) {
        Set<Origin> origins = new HashSet<>();
        Origin firstOrigin = null;
        if (this.origins.isEmpty()) {
            // no configured origins: trust the origin of the current request
            firstOrigin = Origin.create(ctx.request().absoluteURI());
            origins.add(firstOrigin);
        } else {
            for (String origin : this.origins) {
                Origin newOrigin = Origin.create(origin);
                if (firstOrigin == null) {
                    firstOrigin = newOrigin;
                }
                origins.add(newOrigin);
            }
        }
        String rpId = this.rpId != null ? this.rpId : firstOrigin.getHost();
        DefaultChallenge challengeObject = new DefaultChallenge(challenge.getPrincipal());
        return new ServerProperty(origins, rpId, challengeObject, /* this is deprecated in Level 3, so ignore it */ null);
    }
    /**
     * Logs an existing WebAuthn user in. This will check it, clear the challenge cookie and return the updated credentials in
     * case of
     * success, but not invoke {@link WebAuthnUserProvider#update(String, long)}, you have to do
     * it manually in case of success. This will also not set a login cookie, you have to do it manually using
     * {@link #rememberUser(String, RoutingContext)}
     * or using any other way.
     *
     * @param response the Webauthn login info
     * @param ctx the current request
     * @return the updated credentials
     */
    public Uni<WebAuthnCredentialRecord> login(WebAuthnLoginResponse response, RoutingContext ctx) {
        // delegate to the JsonObject variant, which performs validation and verification
        return login(response.toJsonObject(), ctx);
    }
    /**
     * Logs an existing WebAuthn user in. This will check it, clear the challenge cookie and return the updated credentials in
     * case of
     * success, but not invoke {@link WebAuthnUserProvider#update(String, long)}, you have to do
     * it manually in case of success. This will also not set a login cookie, you have to do it manually using
     * {@link #rememberUser(String, RoutingContext)}
     * or using any other way.
     *
     * @param response the Webauthn login info
     * @param ctx the current request
     * @return the updated credentials
     */
    public Uni<WebAuthnCredentialRecord> login(JsonObject response, RoutingContext ctx) {
        RestoreResult challenge = authMech.getLoginManager().restore(ctx, challengeCookie);
        if (challenge == null || challenge.getPrincipal() == null || challenge.getPrincipal().isEmpty()
        // although login can be empty, we should still have a cookie for it
        ) {
            return Uni.createFrom().failure(new RuntimeException("Missing challenge"));
        }
        // input validation
        if (response == null ||
                !containsRequiredString(response, "id") ||
                !containsRequiredString(response, "rawId") ||
                !containsRequiredObject(response, "response") ||
                !containsOptionalString(response.getJsonObject("response"), "userHandle") ||
                !containsRequiredString(response, "type") ||
                !"public-key".equals(response.getString("type"))) {
            return Uni.createFrom().failure(new IllegalArgumentException(
                    "Response missing one or more of id/rawId/response[.userHandle]/type fields, or type is not public-key"));
        }
        String authenticationResponseJSON = response.encode();
        // validated
        String rawId = response.getString("rawId");
        ServerProperty serverProperty = makeServerProperty(challenge, ctx);
        // look the stored credentials up by id, then verify the assertion against them
        return storage.findByCredID(rawId)
                .chain(credentialRecord -> {
                    List<byte[]> allowCredentials = List.of(Base64UrlUtil.decode(rawId));
                    AuthenticationParameters authenticationParameters = new AuthenticationParameters(serverProperty,
                            credentialRecord, allowCredentials,
                            userVerification == UserVerification.REQUIRED, userPresenceRequired);
                    return Uni.createFrom()
                            .completionStage(webAuthn.verifyAuthenticationResponseJSON(authenticationResponseJSON,
                                    authenticationParameters))
                            // the challenge cookie is single-use: drop it whether or not verification succeeded
                            .eventually(() -> {
                                removeCookie(ctx, challengeCookie);
                            }).map(authenticationData -> credentialRecord);
                });
    }
    /**
     * Removes the given cookie from the current response, making sure web clients actually clear it.
     *
     * @param ctx the current request
     * @param name the name of the cookie to remove
     */
    static void removeCookie(RoutingContext ctx, String name) {
        // Vert.x sends back a set-cookie with max-age and expiry but no path, so we have to set it first,
        // otherwise web clients don't clear it
        Cookie cookie = ctx.request().getCookie(name);
        if (cookie != null) {
            cookie.setPath("/");
        }
        ctx.response().removeCookie(name);
    }
    /**
     * Returns the underlying WebAuthn4J authenticator used for registration and login verification.
     *
     * @return the underlying WebAuthn4J authenticator
     */
    public WebAuthnAsyncManager getWebAuthn4J() {
        return webAuthn;
    }
    /**
     * Adds a login cookie to the current request for the given user ID
     *
     * @param userID the user ID to use as {@link Principal}
     * @param ctx the current request, in order to add a cookie
     */
    public void rememberUser(String userID, RoutingContext ctx) {
        QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder();
        builder.setPrincipal(new QuarkusPrincipal(userID));
        // NOTE(review): third argument to save() is null here — confirm its semantics in LoginManager;
        // the cookie's secure flag follows whether the current request came over SSL
        authMech.getLoginManager().save(builder.build(), ctx, null, ctx.request().isSSL());
    }
    /**
     * Clears the login cookie on the current request
     *
     * @param ctx the current request, in order to clear the login cookie
     */
    public void logout(RoutingContext ctx) {
        authMech.getLoginManager().clear(ctx);
    }
static boolean containsRequiredString(JsonObject json, String key) {
try {
if (json == null) {
return false;
}
if (!json.containsKey(key)) {
return false;
}
Object s = json.getValue(key);
return (s instanceof String) && !"".equals(s);
} catch (ClassCastException e) {
return false;
}
}
private static boolean containsOptionalString(JsonObject json, String key) {
try {
if (json == null) {
return true;
}
if (!json.containsKey(key)) {
return true;
}
Object s = json.getValue(key);
return (s instanceof String);
} catch (ClassCastException e) {
return false;
}
}
private static boolean containsRequiredObject(JsonObject json, String key) {
try {
if (json == null) {
return false;
}
if (!json.containsKey(key)) {
return false;
}
JsonObject s = json.getJsonObject(key);
return s != null;
} catch (ClassCastException e) {
return false;
}
}
    /**
     * Serialises the given registration options to their JSON wire format.
     *
     * @param challenge the registration options to serialise
     * @return the JSON representation
     */
    public String toJsonString(PublicKeyCredentialCreationOptions challenge) {
        return objectConverter.getJsonConverter().writeValueAsString(challenge);
    }
    /**
     * Serialises the given login options to their JSON wire format.
     *
     * @param challenge the login options to serialise
     * @return the JSON representation
     */
    public String toJsonString(PublicKeyCredentialRequestOptions challenge) {
        return objectConverter.getJsonConverter().writeValueAsString(challenge);
    }
    /**
     * Returns the list of allowed origins, or defaults to the current request's origin if unconfigured.
     *
     * @param ctx the current request, used to derive the default origin
     * @return the list of allowed origins
     */
    public List<String> getAllowedOrigins(RoutingContext ctx) {
        if (this.origins.isEmpty()) {
            return List.of(Origin.create(ctx.request().absoluteURI()).toString());
        } else {
            return this.origins;
        }
    }
    // package-private accessor for the credential storage
    WebAuthnAuthenticatorStorage storage() {
        return storage;
    }
}
| WebAuthnSecurity |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java | {
"start": 1523,
"end": 3689
} | class ____ extends HTestCase {
  // Helper accessors must fail fast when the corresponding annotation is absent.
  @Test
  public void testDirNoAnnotation() throws Exception {
    assertThrows(IllegalStateException.class, () -> {
      TestDirHelper.getTestDir();
    });
  }
  @Test
  public void testJettyNoAnnotation() throws Exception {
    assertThrows(IllegalStateException.class, () -> {
      TestJettyHelper.getJettyServer();
    });
  }
  @Test
  public void testJettyNoAnnotation2() throws Exception {
    assertThrows(IllegalStateException.class, () -> {
      TestJettyHelper.getJettyURL();
    });
  }
  // With @TestDir present, a test directory must be provisioned.
  @Test
  @TestDir
  public void testDirAnnotation() throws Exception {
    assertNotNull(TestDirHelper.getTestDir());
  }
  // A predicate that is immediately true should return (almost) instantly.
  @Test
  public void waitFor() {
    long start = Time.now();
    long waited = waitFor(1000, new Predicate() {
      @Override
      public boolean evaluate() throws Exception {
        return true;
      }
    });
    long end = Time.now();
    // 50ms slack for scheduling jitter
    assertEquals(waited, 0, 50);
    assertEquals(end - start - waited, 0, 50);
  }
  // A predicate that never becomes true should time out (waited == -1) after ~200ms.
  @Test
  public void waitForTimeOutRatio1() {
    setWaitForRatio(1);
    long start = Time.now();
    long waited = waitFor(200, new Predicate() {
      @Override
      public boolean evaluate() throws Exception {
        return false;
      }
    });
    long end = Time.now();
    assertEquals(waited, -1);
    assertEquals(end - start, 200, 50);
  }
  // With a wait-for ratio of 2, the timeout should be scaled accordingly.
  @Test
  public void waitForTimeOutRatio2() {
    setWaitForRatio(2);
    long start = Time.now();
    long waited = waitFor(200, new Predicate() {
      @Override
      public boolean evaluate() throws Exception {
        return false;
      }
    });
    long end = Time.now();
    assertEquals(waited, -1);
    assertEquals(end - start, 200 * getWaitForRatio(), 50 * getWaitForRatio());
  }
  // sleep() with ratio 1 should last approximately the requested duration.
  @Test
  public void sleepRatio1() {
    setWaitForRatio(1);
    long start = Time.now();
    sleep(100);
    long end = Time.now();
    assertEquals(end - start, 100, 50);
  }
@Test
public void sleepRatio2() {
setWaitForRatio(1);
long start = Time.now();
sleep(100);
long end = Time.now();
assertEquals(end - start, 100 * getWaitForRatio(), 50 * getWaitForRatio());
}
public static | TestHTestCase |
java | apache__camel | components/camel-file/src/main/java/org/apache/camel/component/file/consumer/FileOffsetResumeAdapter.java | {
"start": 1005,
"end": 1243
} | interface ____ {
    /**
     * Sets the resume payload used for the adapter.
     *
     * The default implementation is a no-op; implementations override this to
     * receive the file whose offset is being resumed.
     *
     * @param genericFile a generic file instance
     */
    default void setResumePayload(GenericFile<File> genericFile) {
    }
}
| FileOffsetResumeAdapter |
java | apache__camel | components/camel-telemetry/src/main/java/org/apache/camel/telemetry/decorators/ElasticsearchSpanDecorator.java | {
"start": 1038,
"end": 2235
} | class ____ extends AbstractSpanDecorator {
    // value used for the db.system span tag
    public static final String ELASTICSEARCH_DB_TYPE = "elasticsearch";
    @Override
    public String getComponent() {
        return "elasticsearch";
    }
    @Override
    public String getComponentClassName() {
        return "org.apache.camel.component.es.ElasticsearchComponent";
    }
    // Prefer the "operation" endpoint query parameter as the operation name, when present.
    @Override
    public String getOperationName(Exchange exchange, Endpoint endpoint) {
        Map<String, String> queryParameters = toQueryParameters(endpoint.getEndpointUri());
        return queryParameters.containsKey("operation")
                ? queryParameters.get("operation")
                : super.getOperationName(exchange, endpoint);
    }
    // Tags the span with the database system and, when configured on the endpoint, the index name.
    @Override
    public void beforeTracingEvent(Span span, Exchange exchange, Endpoint endpoint) {
        super.beforeTracingEvent(span, exchange, endpoint);
        span.setTag(TagConstants.DB_SYSTEM, ELASTICSEARCH_DB_TYPE);
        Map<String, String> queryParameters = toQueryParameters(endpoint.getEndpointUri());
        if (queryParameters.containsKey("indexName")) {
            span.setTag(TagConstants.DB_NAME, queryParameters.get("indexName"));
        }
    }
}
| ElasticsearchSpanDecorator |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-servlet/deployment/src/main/java/io/quarkus/resteasy/reactive/server/servlet/deployment/ResteasyReactiveServletProcessor.java | {
"start": 1086,
"end": 3237
} | class ____ {
    private static final String JAVAX_WS_RS_APPLICATION = Application.class.getName();
    // filter and servlet share the application class name as their registration name
    private static final String JAX_RS_FILTER_NAME = JAVAX_WS_RS_APPLICATION;
    private static final String JAX_RS_SERVLET_NAME = JAVAX_WS_RS_APPLICATION;
    // Use the servlet-based request context implementation for RESTEasy Reactive.
    @BuildStep
    public RequestContextFactoryBuildItem contextFactoryBuildItem() {
        return new RequestContextFactoryBuildItem(ServletRequestContextFactory.INSTANCE);
    }
    /**
     * Registers the JAX-RS application either as a servlet filter (when mounted at the root path,
     * so other servlets keep working) or as a servlet that takes over the configured application path.
     */
    @BuildStep
    @Record(STATIC_INIT)
    public void build(ResteasyReactiveServletRecorder restRecorder,
            ResteasyReactiveDeploymentBuildItem deploymentBuildItem,
            BuildProducer<FilterBuildItem> filter,
            BuildProducer<ServletBuildItem> servlet) throws Exception {
        if (deploymentBuildItem == null) {
            return;
        }
        String path = deploymentBuildItem.getApplicationPath();
        //if JAX-RS is installed at the root location we use a filter, otherwise we use a Servlet and take over the whole mapped path
        if (path.equals("/") || path.isEmpty()) {
            filter.produce(
                    FilterBuildItem.builder(JAX_RS_FILTER_NAME, ResteasyReactiveFilter.class.getName()).setLoadOnStartup(1)
                            .addFilterServletNameMapping("default", DispatcherType.REQUEST)
                            .addFilterServletNameMapping("default", DispatcherType.FORWARD)
                            .addFilterServletNameMapping("default", DispatcherType.INCLUDE)
                            .setInstanceFactory(restRecorder.filter(deploymentBuildItem.getDeployment()))
                            .setAsyncSupported(true)
                            .build());
        } else {
            String mappingPath = deploymentBuildItem.getApplicationPath();
            servlet.produce(ServletBuildItem.builder(JAX_RS_SERVLET_NAME, ResteasyReactiveServlet.class.getName())
                    .setInstanceFactory(restRecorder.servlet(deploymentBuildItem.getDeployment()))
                    .setLoadOnStartup(1).addMapping(mappingPath + "/*").setAsyncSupported(true).build());
        }
    }
}
| ResteasyReactiveServletProcessor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/planning/IterativePlanner.java | {
"start": 2792,
"end": 12614
} | class ____ extends PlanningAlgorithm {
  // Modifications performed by the algorithm that have not yet been reflected
  // in the actual plan while a request is still pending.
  private RLESparseResourceAllocation planModifications;
  // Data extracted from plan
  private RLESparseResourceAllocation planLoads;
  private Resource capacity;
  private long step;
  // Job parameters
  private ReservationRequestInterpreter jobType;
  private long jobArrival;
  private long jobDeadline;
  // Phase algorithms
  private StageExecutionInterval algStageExecutionInterval = null;
  private StageAllocator algStageAllocator = null;
  private final boolean allocateLeft;
  /**
   * Constructor.
   *
   * @param algStageExecutionInterval algorithm computing each stage's execution interval
   * @param algStageAllocator algorithm computing each stage's allocation
   * @param allocateLeft whether stages are allocated left-to-right (true) or right-to-left
   */
  public IterativePlanner(StageExecutionInterval algStageExecutionInterval,
      StageAllocator algStageAllocator, boolean allocateLeft) {
    this.allocateLeft = allocateLeft;
    setAlgStageExecutionInterval(algStageExecutionInterval);
    setAlgStageAllocator(algStageAllocator);
  }
  /**
   * Computes the allocation for the whole job by allocating each of its stages
   * in sequence (direction depends on {@code allocateLeft}) and merging the
   * stage allocations into a single result.
   *
   * @throws PlanningException if a mandatory stage (or, for an ANY job, every
   *           alternative) cannot be satisfied
   */
  @Override
  public RLESparseResourceAllocation computeJobAllocation(Plan plan,
      ReservationId reservationId, ReservationDefinition reservation,
      String user) throws PlanningException {
    // Initialize
    initialize(plan, reservationId, reservation);
    // Create the allocations data structure
    RLESparseResourceAllocation allocations =
        new RLESparseResourceAllocation(plan.getResourceCalculator());
    StageProvider stageProvider = new StageProvider(allocateLeft, reservation);
    // Current stage
    ReservationRequest currentReservationStage;
    // initialize periodicity
    long period = 0;
    if(reservation.getRecurrenceExpression() != null){
      period = Long.parseLong(reservation.getRecurrenceExpression());
    }
    // Iterate the stages in reverse order
    while (stageProvider.hasNext()) {
      // Get current stage
      currentReservationStage = stageProvider.next();
      // Validate that the ReservationRequest respects basic constraints
      validateInputStage(plan, currentReservationStage);
      // Set the stageArrival and stageDeadline
      ReservationInterval stageInterval =
          setStageExecutionInterval(plan, reservation, currentReservationStage,
              allocations);
      Long stageArrival = stageInterval.getStartTime();
      Long stageDeadline = stageInterval.getEndTime();
      // Compute stage allocation
      Map<ReservationInterval, Resource> curAlloc =
          computeStageAllocation(plan, currentReservationStage, stageArrival,
              stageDeadline, period, user, reservationId);
      // If we did not find an allocation, return NULL
      // (unless it's an ANY job, then we simply continue).
      if (curAlloc == null) {
        // If it's an ANY job, we can move to the next possible request
        if (jobType == ReservationRequestInterpreter.R_ANY) {
          continue;
        }
        // Otherwise, the job cannot be allocated
        throw new PlanningException("The request cannot be satisfied");
      }
      // Validate ORDER_NO_GAP
      if (jobType == ReservationRequestInterpreter.R_ORDER_NO_GAP) {
        if (!validateOrderNoGap(allocations, curAlloc, allocateLeft)) {
          throw new PlanningException(
              "The allocation found does not respect ORDER_NO_GAP");
        }
      }
      // If we did find an allocation for the stage, add it
      for (Entry<ReservationInterval, Resource> entry : curAlloc.entrySet()) {
        allocations.addInterval(entry.getKey(), entry.getValue());
      }
      // If this is an ANY clause, we have finished
      if (jobType == ReservationRequestInterpreter.R_ANY) {
        break;
      }
    }
    // If the allocation is empty, return an error
    if (allocations.isEmpty()) {
      throw new PlanningException("The request cannot be satisfied");
    }
    return allocations;
  }
  /**
   * Checks that adding the current stage allocation keeps the job allocation
   * gap-free: the new stage must abut the allocation accumulated so far (in the
   * direction of allocation) and must itself be non-preemptive.
   *
   * @param allocations the allocation accumulated so far
   * @param curAlloc the candidate allocation for the current stage
   * @param allocateLeft direction of allocation (left-to-right when true)
   * @return true if the candidate respects ORDER_NO_GAP
   */
  protected static boolean validateOrderNoGap(
      RLESparseResourceAllocation allocations,
      Map<ReservationInterval, Resource> curAlloc, boolean allocateLeft) {
    // Left to right
    if (allocateLeft) {
      Long stageStartTime = findEarliestTime(curAlloc);
      Long allocationEndTime = allocations.getLatestNonNullTime();
      // Check that there is no gap between stages
      if ((allocationEndTime != -1) && (allocationEndTime < stageStartTime)) {
        return false;
      }
      // Right to left
    } else {
      Long stageEndTime = findLatestTime(curAlloc);
      Long allocationStartTime = allocations.getEarliestStartTime();
      // Check that there is no gap between stages
      if ((allocationStartTime != -1) && (stageEndTime < allocationStartTime)) {
        return false;
      }
    }
    // Check that the stage allocation does not violate ORDER_NO_GAP
    if (!isNonPreemptiveAllocation(curAlloc)) {
      return false;
    }
    // The allocation is legal
    return true;
  }
  /**
   * Extracts job parameters and the current plan load for the job's time window,
   * subtracting any previous allocation of the same reservation so it can be
   * re-planned.
   */
  protected void initialize(Plan plan, ReservationId reservationId,
      ReservationDefinition reservation) throws PlanningException {
    // Get plan step & capacity
    capacity = plan.getTotalCapacity();
    step = plan.getStep();
    // Get job parameters (type, arrival time & deadline)
    jobType = reservation.getReservationRequests().getInterpreter();
    jobArrival = stepRoundUp(reservation.getArrival(), step);
    jobDeadline = stepRoundDown(reservation.getDeadline(), step);
    // Initialize the plan modifications
    planModifications =
        new RLESparseResourceAllocation(plan.getResourceCalculator());
    // Dirty read of plan load
    // planLoads are not used by other StageAllocators... and don't deal
    // well with huge reservation ranges
    planLoads = plan.getCumulativeLoadOverTime(jobArrival, jobDeadline);
    ReservationAllocation oldRes = plan.getReservationById(reservationId);
    if (oldRes != null) {
      planLoads = RLESparseResourceAllocation.merge(
          plan.getResourceCalculator(), plan.getTotalCapacity(), planLoads,
          oldRes.getResourcesOverTime(jobArrival, jobDeadline),
          RLEOperator.subtract, jobArrival, jobDeadline);
    }
  }
  /**
   * Validates basic per-stage constraints: gang size, container count,
   * divisibility of containers by gang size, and the maximum container size.
   *
   * @throws ContractValidationException if any constraint is violated
   */
  private void validateInputStage(Plan plan, ReservationRequest rr)
      throws ContractValidationException {
    // Validate concurrency
    if (rr.getConcurrency() < 1) {
      throw new ContractValidationException("Gang Size should be >= 1");
    }
    // Validate number of containers
    if (rr.getNumContainers() <= 0) {
      throw new ContractValidationException("Num containers should be > 0");
    }
    // Check that gangSize and numContainers are compatible
    if (rr.getNumContainers() % rr.getConcurrency() != 0) {
      throw new ContractValidationException(
          "Parallelism must be an exact multiple of gang size");
    }
    // Check that the largest container request does not exceed the cluster-wide
    // limit for container sizes
    if (Resources.greaterThan(plan.getResourceCalculator(), capacity,
        rr.getCapability(), plan.getMaximumAllocation())) {
      throw new ContractValidationException(
          "Individual capability requests should not exceed cluster's "
              + "maxAlloc");
    }
  }
private static boolean isNonPreemptiveAllocation(
Map<ReservationInterval, Resource> curAlloc) {
// Checks whether a stage allocation is non preemptive or not.
// Assumption: the intervals are non-intersecting (as returned by
// computeStageAllocation()).
// For a non-preemptive allocation, only two end points appear exactly once
Set<Long> endPoints = new HashSet<Long>(2 * curAlloc.size());
for (Entry<ReservationInterval, Resource> entry : curAlloc.entrySet()) {
ReservationInterval interval = entry.getKey();
Resource resource = entry.getValue();
// Ignore intervals with no allocation
if (Resources.equals(resource, Resource.newInstance(0, 0))) {
continue;
}
// Get endpoints
Long left = interval.getStartTime();
Long right = interval.getEndTime();
// Add left endpoint if we haven't seen it before, remove otherwise
if (!endPoints.contains(left)) {
endPoints.add(left);
} else {
endPoints.remove(left);
}
// Add right endpoint if we haven't seen it before, remove otherwise
if (!endPoints.contains(right)) {
endPoints.add(right);
} else {
endPoints.remove(right);
}
}
// Non-preemptive only if endPoints is of size 2
return (endPoints.size() == 2);
}
  // Delegates computation of the stage execution interval to the configured
  // StageExecutionInterval algorithm.
  protected ReservationInterval setStageExecutionInterval(Plan plan,
      ReservationDefinition reservation,
      ReservationRequest currentReservationStage,
      RLESparseResourceAllocation allocations) {
    return algStageExecutionInterval.computeExecutionInterval(plan,
        reservation, currentReservationStage, allocateLeft, allocations);
  }
  // Delegates computation of the stage allocation to the configured
  // StageAllocator algorithm.
  protected Map<ReservationInterval, Resource> computeStageAllocation(Plan plan,
      ReservationRequest rr, long stageArrivalTime, long stageDeadline,
      long period, String user, ReservationId oldId) throws PlanningException {
    return algStageAllocator.computeStageAllocation(plan, planLoads,
        planModifications, rr, stageArrivalTime, stageDeadline, period, user,
        oldId);
  }
  // Set the algorithm: algStageExecutionInterval
  public IterativePlanner setAlgStageExecutionInterval(
      StageExecutionInterval alg) {
    this.algStageExecutionInterval = alg;
    return this; // To allow concatenation of setAlg() functions
  }
  // Set the algorithm: algStageAllocator
  public IterativePlanner setAlgStageAllocator(StageAllocator alg) {
    this.algStageAllocator = alg;
    return this; // To allow concatenation of setAlg() functions
  }
/**
* Helper | IterativePlanner |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/vo/UserVo.java | {
"start": 873,
"end": 2155
} | class ____ {
    private String name;
    private String addr;
    private int age;
    public UserVo(String name, String addr, int age) {
        this.name = name;
        this.addr = addr;
        this.age = age;
    }
    // no-arg constructor required for (de)serialization
    public UserVo() {}
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getAddr() {
        return addr;
    }
    public void setAddr(String addr) {
        this.addr = addr;
    }
    public int getAge() {
        return age;
    }
    public void setAge(int age) {
        this.age = age;
    }
    // sample instance used by tests
    public static UserVo getInstance() {
        return new UserVo("dubbo", "hangzhou", 10);
    }
    @Override
    public String toString() {
        return "UserVo{" + "name='" + name + '\'' + ", addr='" + addr + '\'' + ", age=" + age + '}';
    }
    // equals/hashCode compare all three fields, keeping the two consistent
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        UserVo userVo = (UserVo) o;
        return age == userVo.age && Objects.equals(name, userVo.name) && Objects.equals(addr, userVo.addr);
    }
    @Override
    public int hashCode() {
        return Objects.hash(name, addr, age);
    }
}
| UserVo |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/consumer/internals/SubscriptionStateTest.java | {
"start": 43979,
"end": 52188
} | class ____ implements ConsumerRebalanceListener {
        // last partitions passed to each callback, plus invocation counters, for assertions
        Collection<TopicPartition> revoked;
        public Collection<TopicPartition> assigned;
        int revokedCount = 0;
        int assignedCount = 0;
        @Override
        public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
            this.assigned = partitions;
            assignedCount++;
        }
        @Override
        public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
            this.revoked = partitions;
            revokedCount++;
        }
}
    @Test
    public void resetOffsetNoValidation() {
        // Check that offset reset works when we can't validate offsets (older brokers)
        Node broker1 = new Node(1, "localhost", 9092);
        state.assignFromUser(Set.of(tp0));
        // Reset offsets
        state.requestOffsetReset(tp0, AutoOffsetResetStrategy.EARLIEST);
        // Attempt to validate with older API version, should do nothing
        ApiVersions oldApis = new ApiVersions();
        oldApis.update("1", NodeApiVersions.create(ApiKeys.OFFSET_FOR_LEADER_EPOCH.id, (short) 0, (short) 2));
        assertFalse(state.maybeValidatePositionForCurrentLeader(oldApis, tp0, new Metadata.LeaderAndEpoch(
            Optional.of(broker1), Optional.empty())));
        assertFalse(state.hasValidPosition(tp0));
        assertFalse(state.awaitingValidation(tp0));
        assertTrue(state.isOffsetResetNeeded(tp0));
        // Complete the reset via unvalidated seek
        state.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(10L));
        assertTrue(state.hasValidPosition(tp0));
        assertFalse(state.awaitingValidation(tp0));
        assertFalse(state.isOffsetResetNeeded(tp0));
        // Next call to validate offsets does nothing
        assertFalse(state.maybeValidatePositionForCurrentLeader(oldApis, tp0, new Metadata.LeaderAndEpoch(
            Optional.of(broker1), Optional.empty())));
        assertTrue(state.hasValidPosition(tp0));
        assertFalse(state.awaitingValidation(tp0));
        assertFalse(state.isOffsetResetNeeded(tp0));
        // Reset again, and complete it with a seek that would normally require validation
        state.requestOffsetReset(tp0, AutoOffsetResetStrategy.EARLIEST);
        state.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(10L, Optional.of(10), new Metadata.LeaderAndEpoch(
            Optional.of(broker1), Optional.of(2))));
        // We are now in AWAIT_VALIDATION
        assertFalse(state.hasValidPosition(tp0));
        assertTrue(state.awaitingValidation(tp0));
        assertFalse(state.isOffsetResetNeeded(tp0));
        // Now ensure next call to validate clears the validation state
        // (the old API version cannot validate, so the position is accepted as-is)
        assertFalse(state.maybeValidatePositionForCurrentLeader(oldApis, tp0, new Metadata.LeaderAndEpoch(
            Optional.of(broker1), Optional.of(2))));
        assertTrue(state.hasValidPosition(tp0));
        assertFalse(state.awaitingValidation(tp0));
        assertFalse(state.isOffsetResetNeeded(tp0));
    }
    // Lag must be null (not zero) while no position has been established.
    @Test
    public void nullPositionLagOnNoPosition() {
        state.assignFromUser(Set.of(tp0));
        assertNull(state.partitionLag(tp0, IsolationLevel.READ_UNCOMMITTED));
        assertNull(state.partitionLag(tp0, IsolationLevel.READ_COMMITTED));
        state.updateHighWatermark(tp0, 1L);
        state.updateLastStableOffset(tp0, 1L);
        assertNull(state.partitionLag(tp0, IsolationLevel.READ_UNCOMMITTED));
        assertNull(state.partitionLag(tp0, IsolationLevel.READ_COMMITTED));
    }
    // positionOrNull returns the position for assigned partitions and null otherwise.
    @Test
    public void testPositionOrNull() {
        state.assignFromUser(Set.of(tp0));
        final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
        state.seek(tp0, 5);
        assertEquals(5, state.positionOrNull(tp0).offset);
        assertNull(state.positionOrNull(unassignedPartition));
    }
    // The tryUpdating* methods succeed only for assigned partitions.
    @Test
    public void testTryUpdatingHighWatermark() {
        state.assignFromUser(Set.of(tp0));
        final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
        final long highWatermark = 10L;
        assertTrue(state.tryUpdatingHighWatermark(tp0, highWatermark));
        assertEquals(highWatermark, state.partitionEndOffset(tp0, IsolationLevel.READ_UNCOMMITTED));
        assertFalse(state.tryUpdatingHighWatermark(unassignedPartition, highWatermark));
    }
    @Test
    public void testTryUpdatingLogStartOffset() {
        state.assignFromUser(Set.of(tp0));
        final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
        final long position = 25;
        state.seek(tp0, position);
        final long logStartOffset = 10L;
        assertTrue(state.tryUpdatingLogStartOffset(tp0, logStartOffset));
        assertEquals(position - logStartOffset, state.partitionLead(tp0));
        assertFalse(state.tryUpdatingLogStartOffset(unassignedPartition, logStartOffset));
    }
    @Test
    public void testTryUpdatingLastStableOffset() {
        state.assignFromUser(Set.of(tp0));
        final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
        final long lastStableOffset = 10L;
        assertTrue(state.tryUpdatingLastStableOffset(tp0, lastStableOffset));
        assertEquals(lastStableOffset, state.partitionEndOffset(tp0, IsolationLevel.READ_COMMITTED));
        assertFalse(state.tryUpdatingLastStableOffset(unassignedPartition, lastStableOffset));
    }
    @Test
    public void testTryUpdatingPreferredReadReplica() {
        state.assignFromUser(Set.of(tp0));
        final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
        final int preferredReadReplicaId = 10;
        final LongSupplier expirationTimeMs = () -> System.currentTimeMillis() + 60000L;
        assertTrue(state.tryUpdatingPreferredReadReplica(tp0, preferredReadReplicaId, expirationTimeMs));
        assertEquals(Optional.of(preferredReadReplicaId), state.preferredReadReplica(tp0, System.currentTimeMillis()));
        assertFalse(state.tryUpdatingPreferredReadReplica(unassignedPartition, preferredReadReplicaId, expirationTimeMs));
        assertEquals(Optional.empty(), state.preferredReadReplica(unassignedPartition, System.currentTimeMillis()));
    }
    // Reset is requested only for assigned partitions; querying an unassigned one throws.
    @Test
    public void testRequestOffsetResetIfPartitionAssigned() {
        state.assignFromUser(Set.of(tp0));
        final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
        state.requestOffsetResetIfPartitionAssigned(tp0);
        assertTrue(state.isOffsetResetNeeded(tp0));
        state.requestOffsetResetIfPartitionAssigned(unassignedPartition);
        assertThrows(IllegalStateException.class, () -> state.isOffsetResetNeeded(unassignedPartition));
    }
    // This test ensures the "fetchablePartitions" does not run the custom predicate if the partition is not fetchable
    // This func is used in the hot path for fetching, to find fetchable partitions that are not in the buffer,
    // so it should avoid evaluating the predicate if not needed.
    @Test
    public void testFetchablePartitionsPerformsCheapChecksFirst() {
        // Setup fetchable partition and pause it
        state.assignFromUser(Set.of(tp0));
        state.seek(tp0, 100);
        assertTrue(state.isFetchable(tp0));
        state.pause(tp0);
        // Retrieve fetchable partitions with custom predicate.
        AtomicBoolean predicateEvaluated = new AtomicBoolean(false);
        Predicate<TopicPartition> isBuffered = tp -> {
            predicateEvaluated.set(true);
            return true;
        };
        List<TopicPartition> fetchablePartitions = state.fetchablePartitions(isBuffered);
        assertTrue(fetchablePartitions.isEmpty());
        assertFalse(predicateEvaluated.get(), "Custom predicate should not be evaluated when partitions are not fetchable");
        // Resume partition and retrieve fetchable again
        state.resume(tp0);
        predicateEvaluated.set(false);
        fetchablePartitions = state.fetchablePartitions(isBuffered);
        assertTrue(predicateEvaluated.get());
        assertEquals(tp0, fetchablePartitions.get(0));
    }
}
| MockRebalanceListener |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/test/java/org/apache/dubbo/remoting/exchange/support/header/HeaderExchangeClientTest.java | {
"start": 1052,
"end": 2109
} | class ____ {
    // Per the assertions below: reconnect is the default; "true" (case-insensitive) and any
    // numeric value (including 0 and -1) enable it, and only "false" (case-insensitive) disables it.
    @Test
    void testReconnect() {
        HeaderExchangeClient headerExchangeClient = new HeaderExchangeClient(Mockito.mock(Client.class), false);
        Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost")));
        Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=true")));
        Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=tRue")));
        Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=30000")));
        Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=0")));
        Assertions.assertTrue(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=-1")));
        Assertions.assertFalse(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=false")));
        Assertions.assertFalse(headerExchangeClient.shouldReconnect(URL.valueOf("localhost?reconnect=FALSE")));
    }
}
| HeaderExchangeClientTest |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/TimeOrderedCachingPersistentWindowStoreTest.java | {
"start": 4813,
"end": 62612
} | class ____ {
    // Cache capacity is deliberately tiny so eviction/flush tests can trigger easily.
    private static final int MAX_CACHE_SIZE_BYTES = 300;
    private static final long DEFAULT_TIMESTAMP = 10L;
    private static final Long WINDOW_SIZE = 10L;
    private static final long SEGMENT_INTERVAL = 100L;
    private static final String TOPIC = "topic";
    private static final String CACHE_NAMESPACE = "0_0-store-name";
    private ThreadCache cache;  // shared thread cache backing the caching layer
    private InternalMockProcessorContext<?, ?> context;
    private TimeFirstWindowKeySchema baseKeySchema;  // time-first segment key layout
    private WindowStore<Bytes, byte[]> underlyingStore;
    private TimeOrderedCachingWindowStore cachingStore;  // store under test (wraps underlyingStore)
    private RocksDBTimeOrderedWindowSegmentedBytesStore bytesStore;
    private CacheFlushListenerStub<Windowed<String>, String> cacheListener;
    // Builds a fresh store hierarchy (segmented bytes store -> window store ->
    // caching layer) per test; hasIndex toggles the secondary key-first index.
    // Ordering matters: the flush listener and cache must be wired before init().
    private void setUp(final boolean hasIndex) {
        baseKeySchema = new TimeFirstWindowKeySchema();
        bytesStore = new RocksDBTimeOrderedWindowSegmentedBytesStore("test", "metrics-scope", 100, SEGMENT_INTERVAL, hasIndex);
        underlyingStore = new RocksDBTimeOrderedWindowStore(bytesStore, false, WINDOW_SIZE);
        final TimeWindowedDeserializer<String> keyDeserializer = new TimeWindowedDeserializer<>(new StringDeserializer(), WINDOW_SIZE);
        keyDeserializer.setIsChangelogTopic(true);
        cacheListener = new CacheFlushListenerStub<>(keyDeserializer, new StringDeserializer());
        cachingStore = new TimeOrderedCachingWindowStore(underlyingStore, WINDOW_SIZE, SEGMENT_INTERVAL);
        cachingStore.setFlushListener(cacheListener, false);
        cache = new ThreadCache(new LogContext("testCache "), MAX_CACHE_SIZE_BYTES, new MockStreamsMetrics(new Metrics()));
        context = new InternalMockProcessorContext<>(TestUtils.tempDirectory(), null, null, null, cache);
        context.setRecordContext(new ProcessorRecordContext(DEFAULT_TIMESTAMP, 0, 0, TOPIC, new RecordHeaders()));
        cachingStore.init(context, cachingStore);  // registers the store and attaches the cache
    }
    @AfterEach
    public void closeStore() {
        // Closing also clears this store's namespace from the shared thread cache.
        cachingStore.close();
    }
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldDelegateInit(final boolean hasIndex) {
setUp(hasIndex);
final RocksDBTimeOrderedWindowStore inner = mock(RocksDBTimeOrderedWindowStore.class);
when(inner.hasIndex()).thenReturn(hasIndex);
final TimeOrderedCachingWindowStore outer = new TimeOrderedCachingWindowStore(inner, WINDOW_SIZE, SEGMENT_INTERVAL);
outer.init(context, outer);
verify(inner, times(1)).init(context, outer);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfWrongStore(final boolean hasIndex) {
setUp(hasIndex);
final RocksDBTimestampedWindowStore innerWrong = mock(RocksDBTimestampedWindowStore.class);
final Exception e = assertThrows(IllegalArgumentException.class,
() -> new TimeOrderedCachingWindowStore(innerWrong, WINDOW_SIZE, SEGMENT_INTERVAL));
assertThat(e.getMessage(),
containsString("TimeOrderedCachingWindowStore only supports RocksDBTimeOrderedWindowStore backed store"));
final RocksDBTimeOrderedWindowStore inner = mock(RocksDBTimeOrderedWindowStore.class);
// Nothing happens
new TimeOrderedCachingWindowStore(inner, WINDOW_SIZE, SEGMENT_INTERVAL);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldNotReturnDuplicatesInRanges(final boolean hasIndex) {
setUp(hasIndex);
final StreamsBuilder builder = new StreamsBuilder();
final StoreBuilder<TimestampedWindowStore<String, String>> storeBuilder = Stores.timestampedWindowStoreBuilder(
RocksDbIndexedTimeOrderedWindowBytesStoreSupplier.create(
"store-name",
ofHours(1L),
ofMinutes(1),
false,
hasIndex
), Serdes.String(), Serdes.String())
.withCachingEnabled();
builder.addStateStore(storeBuilder);
builder.stream(TOPIC,
Consumed.with(Serdes.String(), Serdes.String()))
.process(() -> new Processor<String, String, String, String>() {
private int numRecordsProcessed;
private WindowStore<String, ValueAndTimestamp<String>> store;
@Override
public void init(final ProcessorContext<String, String> processorContext) {
this.store = processorContext.getStateStore("store-name");
int count = 0;
try (final KeyValueIterator<Windowed<String>, ValueAndTimestamp<String>> all = store.all()) {
while (all.hasNext()) {
count++;
all.next();
}
}
assertThat(count, equalTo(0));
}
@Override
public void process(final Record<String, String> record) {
int count = 0;
try (final KeyValueIterator<Windowed<String>, ValueAndTimestamp<String>> all = store.all()) {
while (all.hasNext()) {
count++;
all.next();
}
}
assertThat(count, equalTo(numRecordsProcessed));
store.put(record.value(), ValueAndTimestamp.make(record.value(), record.timestamp()), record.timestamp());
numRecordsProcessed++;
}
}, "store-name");
final Properties streamsConfiguration = new Properties();
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 10 * 1000L);
final Instant initialWallClockTime = Instant.ofEpochMilli(0L);
final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), streamsConfiguration, initialWallClockTime);
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(TOPIC,
new StringSerializer(),
new StringSerializer(),
initialWallClockTime,
Duration.ZERO);
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.advanceWallClockTime(Duration.ofSeconds(10));
inputTopic.advanceTime(Duration.ofSeconds(10));
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.advanceWallClockTime(Duration.ofSeconds(10));
inputTopic.advanceTime(Duration.ofSeconds(10));
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.advanceWallClockTime(Duration.ofSeconds(10));
inputTopic.advanceTime(Duration.ofSeconds(10));
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.close();
}
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutFetchFromCache(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        // Point lookups: hits for the written key/timestamp pairs, misses otherwise.
        assertThat(cachingStore.fetch(bytesKey("a"), 10), equalTo(bytesValue("a")));
        assertThat(cachingStore.fetch(bytesKey("b"), 10), equalTo(bytesValue("b")));
        assertThat(cachingStore.fetch(bytesKey("c"), 10), equalTo(null));
        assertThat(cachingStore.fetch(bytesKey("a"), 0), equalTo(null));
        try (final WindowStoreIterator<byte[]> a = cachingStore.fetch(bytesKey("a"), ofEpochMilli(10), ofEpochMilli(10));
             final WindowStoreIterator<byte[]> b = cachingStore.fetch(bytesKey("b"), ofEpochMilli(10), ofEpochMilli(10))) {
            verifyKeyValue(a.next(), DEFAULT_TIMESTAMP, "a");
            verifyKeyValue(b.next(), DEFAULT_TIMESTAMP, "b");
            assertFalse(a.hasNext());
            assertFalse(b.hasNext());
            // With the index enabled, each put adds a base entry plus an index entry.
            final int expectedSize = hasIndex ? 4 : 2;
            assertEquals(expectedSize, cache.size());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldMatchPositionAfterPutWithFlushListener(final boolean hasIndex) {
        setUp(hasIndex);
        // Position tracking must behave the same with a (no-op) flush listener set.
        cachingStore.setFlushListener(record -> { }, false);
        shouldMatchPositionAfterPut();
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldMatchPositionAfterPutWithoutFlushListener(final boolean hasIndex) {
        setUp(hasIndex);
        // Position tracking must behave the same with no flush listener at all.
        cachingStore.setFlushListener(null, false);
        shouldMatchPositionAfterPut();
    }
    // Shared body for the two position tests above: after two puts under distinct
    // record contexts, the position must stay empty until flush() pushes the
    // cached writes into the underlying store.
    private void shouldMatchPositionAfterPut() {
        context.setRecordContext(new ProcessorRecordContext(0, 1, 0, "", new RecordHeaders()));
        cachingStore.put(bytesKey("key1"), bytesValue("value1"), DEFAULT_TIMESTAMP);
        context.setRecordContext(new ProcessorRecordContext(0, 2, 0, "", new RecordHeaders()));
        cachingStore.put(bytesKey("key2"), bytesValue("value2"), DEFAULT_TIMESTAMP);
        // Position should correspond to the last record's context, not the current context.
        context.setRecordContext(
            new ProcessorRecordContext(0, 3, 0, "", new RecordHeaders())
        );
        // the caching window store doesn't maintain a separate
        // position because it never serves queries from the cache
        assertEquals(Position.emptyPosition(), cachingStore.getPosition());
        assertEquals(Position.emptyPosition(), underlyingStore.getPosition());
        cachingStore.flush();
        // After flushing, both layers report the offset of the second put (offset 2).
        assertEquals(
            Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 2L))))),
            cachingStore.getPosition()
        );
        assertEquals(
            Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 2L))))),
            underlyingStore.getPosition()
        );
    }
    // Asserts one window-iterator entry: key is the window start timestamp,
    // value is the raw payload bytes of expectedValue.
    private void verifyKeyValue(final KeyValue<Long, byte[]> next,
                                final long expectedKey,
                                final String expectedValue) {
        assertThat(next.key, equalTo(expectedKey));
        assertThat(next.value, equalTo(bytesValue(expectedValue)));
    }
private static byte[] bytesValue(final String value) {
return value.getBytes();
}
private static Bytes bytesKey(final String key) {
return Bytes.wrap(key.getBytes());
}
    // Decodes raw bytes back to a String via the Kafka deserializer; a throwaway
    // deserializer per call is acceptable in tests, hence the suppressed warning.
    @SuppressWarnings("resource")
    private String stringFrom(final byte[] from) {
        return new StringDeserializer().deserialize("", from);
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutFetchRangeFromCache(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        // A key-range fetch over [a, b] must return both entries in key order.
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
            cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP))) {
            final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
                new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE))
            );
            final List<String> expectedValues = Arrays.asList("a", "b");
            verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
            // With the index enabled, each put adds a base entry plus an index entry.
            final int expectedSize = hasIndex ? 4 : 2;
            assertEquals(expectedSize, cache.size());
        }
    }
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutFetchRangeFromCacheForNullKeyFrom(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
cachingStore.fetch(null, bytesKey("d"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE))
);
final List<String> expectedValues = Arrays.asList("a", "b", "c", "d");
verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
}
}
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutFetchRangeFromCacheForNullKeyTo(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
        cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
        cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
        // A null "to" key is an open upper bound: everything from "b" onwards.
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
            cachingStore.fetch(bytesKey("b"), null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
            final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
                new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE))
            );
            final List<String> expectedValues = Arrays.asList("b", "c", "d", "e");
            verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutFetchRangeFromCacheForNullKeyFromKeyTo(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
        cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
        cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
        // Both bounds null: the full key range, returned in key order.
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
            cachingStore.fetch(null, null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
            final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
                new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE))
            );
            final List<String> expectedValues = Arrays.asList("a", "b", "c", "d", "e");
            verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutBackwardFetchRangeFromCacheForNullKeyFrom(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
        cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
        cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
        // Backward fetch with open lower bound: keys up to "c", in descending order.
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
            cachingStore.backwardFetch(null, bytesKey("c"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
            final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
                new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE))
            );
            final List<String> expectedValues = Arrays.asList("c", "b", "a");
            verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutBackwardFetchRangeFromCacheForNullKeyTo(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
        cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
        cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
        // Backward fetch with open upper bound: keys from "c" onwards, descending.
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
            cachingStore.backwardFetch(bytesKey("c"), null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
            final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
                new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE))
            );
            final List<String> expectedValues = Arrays.asList("e", "d", "c");
            verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldPutBackwardFetchRangeFromCacheForNullKeyFromKeyTo(final boolean hasIndex) {
        setUp(hasIndex);
        cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
        cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
        cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
        // Both bounds null: full key range, returned in descending key order.
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
            cachingStore.backwardFetch(null, null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
            final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
                new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
                new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE))
            );
            final List<String> expectedValues = Arrays.asList("e", "d", "c", "b", "a");
            verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
        }
    }
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldGetAllFromCache(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("f"), bytesValue("f"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("g"), bytesValue("g"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("h"), bytesValue("h"), DEFAULT_TIMESTAMP);
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = cachingStore.all()) {
final String[] array = {"a", "b", "c", "d", "e", "f", "g", "h"};
for (final String s : array) {
verifyWindowedKeyValue(
iterator.next(),
new Windowed<>(bytesKey(s), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
s);
}
assertFalse(iterator.hasNext());
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldGetAllBackwardFromCache(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("f"), bytesValue("f"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("g"), bytesValue("g"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("h"), bytesValue("h"), DEFAULT_TIMESTAMP);
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = cachingStore.backwardAll()) {
final String[] array = {"h", "g", "f", "e", "d", "c", "b", "a"};
for (final String s : array) {
verifyWindowedKeyValue(
iterator.next(),
new Windowed<>(bytesKey(s), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
s);
}
assertFalse(iterator.hasNext());
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAllWithinTimestampRange(final boolean hasIndex) {
setUp(hasIndex);
final String[] array = {"a", "b", "c", "d", "e", "f", "g", "h"};
for (int i = 0; i < array.length; i++) {
cachingStore.put(bytesKey(array[i]), bytesValue(array[i]), i);
}
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
cachingStore.fetchAll(ofEpochMilli(0), ofEpochMilli(7))) {
for (int i = 0; i < array.length; i++) {
final String str = array[i];
verifyWindowedKeyValue(
iterator.next(),
new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
str);
}
assertFalse(iterator.hasNext());
}
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator1 =
cachingStore.fetchAll(ofEpochMilli(2), ofEpochMilli(4))) {
for (int i = 2; i <= 4; i++) {
final String str = array[i];
verifyWindowedKeyValue(
iterator1.next(),
new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
str);
}
assertFalse(iterator1.hasNext());
}
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator2 =
cachingStore.fetchAll(ofEpochMilli(5), ofEpochMilli(7))) {
for (int i = 5; i <= 7; i++) {
final String str = array[i];
verifyWindowedKeyValue(
iterator2.next(),
new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
str);
}
assertFalse(iterator2.hasNext());
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAllBackwardWithinTimestampRange(final boolean hasIndex) {
setUp(hasIndex);
final String[] array = {"a", "b", "c", "d", "e", "f", "g", "h"};
for (int i = 0; i < array.length; i++) {
cachingStore.put(bytesKey(array[i]), bytesValue(array[i]), i);
}
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
cachingStore.backwardFetchAll(ofEpochMilli(0), ofEpochMilli(7))) {
for (int i = array.length - 1; i >= 0; i--) {
final String str = array[i];
verifyWindowedKeyValue(
iterator.next(),
new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
str);
}
assertFalse(iterator.hasNext());
}
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator1 =
cachingStore.backwardFetchAll(ofEpochMilli(2), ofEpochMilli(4))) {
for (int i = 4; i >= 2; i--) {
final String str = array[i];
verifyWindowedKeyValue(
iterator1.next(),
new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
str);
}
assertFalse(iterator1.hasNext());
}
try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator2 =
cachingStore.backwardFetchAll(ofEpochMilli(5), ofEpochMilli(7))) {
for (int i = 7; i >= 5; i--) {
final String str = array[i];
verifyWindowedKeyValue(
iterator2.next(),
new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
str);
}
assertFalse(iterator2.hasNext());
}
}
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldFlushEvictedItemsIntoUnderlyingStore(final boolean hasIndex) {
        setUp(hasIndex);
        // Overfills the tiny cache so the oldest entry ("0") is evicted and flushed.
        final int added = addItemsToCache();
        // all dirty entries should have been flushed
        try (final KeyValueIterator<Bytes, byte[]> iter = bytesStore.fetch(
            Bytes.wrap("0".getBytes(StandardCharsets.UTF_8)),
            DEFAULT_TIMESTAMP,
            DEFAULT_TIMESTAMP)) {
            final KeyValue<Bytes, byte[]> next = iter.next();
            assertEquals(DEFAULT_TIMESTAMP, baseKeySchema.segmentTimestamp(next.key));
            assertArrayEquals("0".getBytes(), next.value);
            assertFalse(iter.hasNext());
            // One entry was evicted to make room, so the cache holds all but one.
            assertEquals(added - 1, cache.size());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldForwardDirtyItemsWhenFlushCalled(final boolean hasIndex) {
        setUp(hasIndex);
        // flush() must forward the dirty entry to the listener as a new value
        // with no prior value (old-value forwarding is disabled in setUp).
        final Windowed<String> windowedKey =
            new Windowed<>("1", new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE));
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.flush();
        assertEquals("a", cacheListener.forwarded.get(windowedKey).newValue);
        assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldSetFlushListener(final boolean hasIndex) {
        setUp(hasIndex);
        // The caching layer supports flush listeners, so both calls return true.
        assertTrue(cachingStore.setFlushListener(null, true));
        assertTrue(cachingStore.setFlushListener(null, false));
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldForwardOldValuesWhenEnabled(final boolean hasIndex) {
        setUp(hasIndex);
        // With sendOldValues enabled, each flush forwards the previously-flushed
        // value as oldValue; updates within one flush interval are collapsed.
        cachingStore.setFlushListener(cacheListener, true);
        final Windowed<String> windowedKey =
            new Windowed<>("1", new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE));
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.flush();
        // "a" was overwritten before any flush, so only "b" is forwarded, no old value.
        assertEquals("b", cacheListener.forwarded.get(windowedKey).newValue);
        assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
        cacheListener.forwarded.clear();
        cachingStore.put(bytesKey("1"), bytesValue("c"), DEFAULT_TIMESTAMP);
        cachingStore.flush();
        // "b" was already flushed, so it is reported as the old value of "c".
        assertEquals("c", cacheListener.forwarded.get(windowedKey).newValue);
        assertEquals("b", cacheListener.forwarded.get(windowedKey).oldValue);
        cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
        cachingStore.flush();
        // A tombstone forwards null as new value with the flushed "c" as old value.
        assertNull(cacheListener.forwarded.get(windowedKey).newValue);
        assertEquals("c", cacheListener.forwarded.get(windowedKey).oldValue);
        cacheListener.forwarded.clear();
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
        cachingStore.flush();
        // put + delete within the same flush interval cancels out: nothing forwarded.
        assertNull(cacheListener.forwarded.get(windowedKey));
        cacheListener.forwarded.clear();
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldNotForwardOldValuesWhenDisabled(final boolean hasIndex) {
        setUp(hasIndex);
        // With sendOldValues disabled (the setUp default) oldValue is always null,
        // regardless of what was flushed before.
        final Windowed<String> windowedKey =
            new Windowed<>("1", new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE));
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.flush();
        assertEquals("b", cacheListener.forwarded.get(windowedKey).newValue);
        assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
        cachingStore.put(bytesKey("1"), bytesValue("c"), DEFAULT_TIMESTAMP);
        cachingStore.flush();
        assertEquals("c", cacheListener.forwarded.get(windowedKey).newValue);
        assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
        cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
        cachingStore.flush();
        assertNull(cacheListener.forwarded.get(windowedKey).newValue);
        assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
        cacheListener.forwarded.clear();
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
        cachingStore.flush();
        // put + delete within the same flush interval cancels out: nothing forwarded.
        assertNull(cacheListener.forwarded.get(windowedKey));
        cacheListener.forwarded.clear();
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldForwardDirtyItemToListenerWhenEvicted(final boolean hasIndex) {
        setUp(hasIndex);
        // Every record evicted from the cache must reach the flush listener.
        final int numRecords = addItemsToCache();
        assertEquals(numRecords, cacheListener.forwarded.size());
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldTakeValueFromCacheIfSameTimestampFlushedToRocks(final boolean hasIndex) {
        setUp(hasIndex);
        // "a" is flushed to RocksDB, then "b" is written to the cache with the same
        // key/timestamp; the cached value must shadow the persisted one.
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.flush();
        cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
        try (final WindowStoreIterator<byte[]> fetch =
            cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP))) {
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "b");
            assertFalse(fetch.hasNext());
        }
    }
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateAcrossWindows(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
try (final WindowStoreIterator<byte[]> fetch =
cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
assertFalse(fetch.hasNext());
}
}
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldIterateBackwardAcrossWindows(final boolean hasIndex) {
        setUp(hasIndex);
        // Same key in two consecutive windows; backward fetch returns newest-first.
        cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
        cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
        try (final WindowStoreIterator<byte[]> fetch =
            cachingStore.backwardFetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
            assertFalse(fetch.hasNext());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldIterateCacheAndStore(final boolean hasIndex) {
        setUp(hasIndex);
        final Bytes key = Bytes.wrap("1".getBytes());
        // "a" is written straight to the underlying bytes store, "b" only to the
        // cache; the fetch must merge both layers in ascending timestamp order.
        bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
        cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
        try (final WindowStoreIterator<byte[]> fetch =
            cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
            assertFalse(fetch.hasNext());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldIterateBackwardCacheAndStore(final boolean hasIndex) {
        setUp(hasIndex);
        final Bytes key = Bytes.wrap("1".getBytes());
        // One entry in the persistent layer, one in the cache; backward fetch
        // must merge them newest-first.
        bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
        cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
        try (final WindowStoreIterator<byte[]> fetch =
            cachingStore.backwardFetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
            verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
            assertFalse(fetch.hasNext());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldIterateCacheAndStoreKeyRange(final boolean hasIndex) {
        setUp(hasIndex);
        final Bytes key = Bytes.wrap("1".getBytes());
        // Key-range fetch must also merge the persistent layer and the cache.
        bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
        cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> fetchRange =
            cachingStore.fetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
            verifyWindowedKeyValue(
                fetchRange.next(),
                new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                "a");
            verifyWindowedKeyValue(
                fetchRange.next(),
                new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)),
                "b");
            assertFalse(fetchRange.hasNext());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldIterateBackwardCacheAndStoreKeyRange(final boolean hasIndex) {
        setUp(hasIndex);
        final Bytes key = Bytes.wrap("1".getBytes());
        // Backward key-range fetch merges both layers, newest window first.
        bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
        cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
        try (final KeyValueIterator<Windowed<Bytes>, byte[]> fetchRange =
            cachingStore.backwardFetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
            verifyWindowedKeyValue(
                fetchRange.next(),
                new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)),
                "b");
            verifyWindowedKeyValue(
                fetchRange.next(),
                new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                "a");
            assertFalse(fetchRange.hasNext());
        }
    }
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldClearNamespaceCacheOnClose(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), 0L);
    // With the index enabled a single put produces two cache entries
    // (base + index); without it only one.
    final int expectedEntries = hasIndex ? 2 : 1;
    assertEquals(expectedEntries, cache.size());
    cachingStore.close();
    // Closing the store must evict all of its entries from the shared cache.
    assertEquals(0, cache.size());
}
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfTryingToFetchFromClosedCachingStore(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.close();
    // A single-key fetch on a closed store must fail fast.
    assertThrows(
        InvalidStateStoreException.class,
        () -> cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(10)));
}
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfTryingToFetchRangeFromClosedCachingStore(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.close();
    // A key-range fetch on a closed store must fail fast.
    assertThrows(
        InvalidStateStoreException.class,
        () -> cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(0), ofEpochMilli(10)));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfTryingToWriteToClosedCachingStore(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.close();
    // Writes after close() must be rejected.
    assertThrows(
        InvalidStateStoreException.class,
        () -> cachingStore.put(bytesKey("a"), bytesValue("a"), 0L));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
// A cached index entry whose base record is absent from the cache (only the
// index is inserted below) must be skipped by range fetches rather than
// surfaced as a result.
public void shouldSkipNonExistBaseKeyInCache(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
// Build the raw cache key for an index entry of key "a" at timestamp 1.
final SegmentedCacheFunction indexCacheFunction = new SegmentedCacheFunction(new KeyFirstWindowKeySchema(), SEGMENT_INTERVAL);
final Bytes key = bytesKey("a");
final byte[] value = bytesValue("0001");
final Bytes cacheIndexKey = indexCacheFunction.cacheKey(KeyFirstWindowKeySchema.toStoreKeyBinary(key, 1, 0));
final String cacheName = context.taskId() + "-test";
// Only put index to store
cache.put(cacheName,
cacheIndexKey,
new LRUCacheEntry(
new byte[0],
new RecordHeaders(),
true,
context.recordContext().offset(),
context.recordContext().timestamp(),
context.recordContext().partition(),
"",
context.recordContext().sourceRawKey(),
context.recordContext().sourceRawValue()
)
);
// The base record for "a" lives only in the underlying store.
underlyingStore.put(key, value, 1);
if (hasIndex) {
// With the index, results come back key-major ("a" before "aa").
verifyKeyValueList(
asList(
windowedPair("a", "0001", 1),
windowedPair("aa", "0002", 0)
),
toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
ofEpochMilli(Long.MAX_VALUE)))
);
} else {
// Without the index, results come back timestamp-major (ts 0 before ts 1).
verifyKeyValueList(
asList(
windowedPair("aa", "0002", 0),
windowedPair("a", "0001", 1)
),
toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
ofEpochMilli(Long.MAX_VALUE)))
);
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAndIterateOverExactKeys(final boolean hasIndex) {
    setUp(hasIndex);
    // Interleave puts for "a" and "aa"; only "a" must come back, oldest first.
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
    cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
    final List<KeyValue<Long, byte[]>> expected = asList(
        KeyValue.pair(0L, bytesValue("0001")),
        KeyValue.pair(1L, bytesValue("0003")),
        KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005")));
    verifyKeyValueList(
        expected,
        toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldBackwardFetchAndIterateOverExactKeys(final boolean hasIndex) {
    setUp(hasIndex);
    // Interleave puts for "a" and "aa"; only "a" must come back, newest first.
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
    cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
    final List<KeyValue<Long, byte[]>> expected = asList(
        KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005")),
        KeyValue.pair(1L, bytesValue("0003")),
        KeyValue.pair(0L, bytesValue("0001")));
    verifyKeyValueList(
        expected,
        toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE))));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
// All records for keys in ["a", "aa"] must be returned; with the index the
// merged range is key-major within a segment, without it timestamp-major.
public void shouldFetchAndIterateOverKeyRange(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
// Single-key ranges are expected to be identical for both index settings.
verifyKeyValueList(
asList(
windowedPair("a", "0001", 0),
windowedPair("a", "0003", 1),
windowedPair("a", "0005", SEGMENT_INTERVAL)
),
toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
);
verifyKeyValueList(
asList(
windowedPair("aa", "0002", 0),
windowedPair("aa", "0004", 1)),
toListAndCloseIterator(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
);
if (hasIndex) {
// Key-major order; the record in the later segment sorts last.
verifyKeyValueList(
asList(
windowedPair("a", "0001", 0),
windowedPair("a", "0003", 1),
windowedPair("aa", "0002", 0),
windowedPair("aa", "0004", 1),
windowedPair("a", "0005", SEGMENT_INTERVAL)
),
toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
ofEpochMilli(Long.MAX_VALUE)))
);
} else {
// Timestamp-major order across keys.
verifyKeyValueList(
asList(
windowedPair("a", "0001", 0),
windowedPair("aa", "0002", 0),
windowedPair("a", "0003", 1),
windowedPair("aa", "0004", 1),
windowedPair("a", "0005", SEGMENT_INTERVAL)
),
toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
ofEpochMilli(Long.MAX_VALUE)))
);
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
// Backward counterpart of shouldFetchAndIterateOverKeyRange: all records for
// keys in ["a", "aa"] are returned newest-first; the relative order of
// different keys depends on whether the index is enabled.
public void shouldFetchAndIterateOverKeyBackwardRange(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
// Single-key backward fetches are strictly newest-first.
verifyKeyValueList(
asList(
windowedPair("a", "0005", SEGMENT_INTERVAL),
windowedPair("a", "0003", 1),
windowedPair("a", "0001", 0)
),
toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
);
verifyKeyValueList(
asList(
windowedPair("aa", "0004", 1),
windowedPair("aa", "0002", 0)),
toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
);
if (!hasIndex) {
verifyKeyValueList(
// Ordered by timestamp if has no index
asList(
windowedPair("a", "0005", SEGMENT_INTERVAL),
windowedPair("aa", "0004", 1),
windowedPair("a", "0003", 1),
windowedPair("aa", "0002", 0),
windowedPair("a", "0001", 0)
),
toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
ofEpochMilli(Long.MAX_VALUE)))
);
} else {
verifyKeyValueList(
asList(
// First because in larger segments
windowedPair("a", "0005", SEGMENT_INTERVAL),
windowedPair("aa", "0004", 1),
windowedPair("aa", "0002", 0),
windowedPair("a", "0003", 1),
windowedPair("a", "0001", 0)
),
toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
ofEpochMilli(Long.MAX_VALUE)))
);
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldReturnSameResultsForSingleKeyFetchAndEqualKeyRangeFetch(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0003"), 2);
    cachingStore.put(bytesKey("aaa"), bytesValue("0004"), 3);
    // fetch(key, from, to) and fetch(key, key, from, to) must agree record
    // by record, and both must be exhausted after the two "aa" entries.
    try (final WindowStoreIterator<byte[]> single = cachingStore.fetch(bytesKey("aa"), 0L, 5L);
         final KeyValueIterator<Windowed<Bytes>, byte[]> range = cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), 0L, 5L)) {
        for (int i = 0; i < 2; i++) {
            assertEquals(stringFrom(single.next().value), stringFrom(range.next().value));
        }
        assertFalse(single.hasNext());
        assertFalse(range.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldReturnSameResultsForSingleKeyFetchAndEqualKeyRangeBackwardFetch(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0003"), 2);
    cachingStore.put(bytesKey("aaa"), bytesValue("0004"), 3);
    // backwardFetch(key, ...) and backwardFetch(key, key, ...) must agree
    // record by record, and both must be exhausted after two "aa" entries.
    try (final WindowStoreIterator<byte[]> single =
             cachingStore.backwardFetch(bytesKey("aa"), Instant.ofEpochMilli(0L), Instant.ofEpochMilli(5L));
         final KeyValueIterator<Windowed<Bytes>, byte[]> range =
             cachingStore.backwardFetch(bytesKey("aa"), bytesKey("aa"), Instant.ofEpochMilli(0L), Instant.ofEpochMilli(5L))) {
        for (int i = 0; i < 2; i++) {
            assertEquals(stringFrom(single.next().value), stringFrom(range.next().value));
        }
        assertFalse(single.hasNext());
        assertFalse(range.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowNullPointerExceptionOnPutNullKey(final boolean hasIndex) {
    setUp(hasIndex);
    // A null key must be rejected up front.
    assertThrows(
        NullPointerException.class,
        () -> cachingStore.put(null, bytesValue("anyValue"), 0L));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldNotThrowNullPointerExceptionOnPutNullValue(final boolean hasIndex) {
    setUp(hasIndex);
    // Putting a null value must complete without throwing.
    cachingStore.put(bytesKey("a"), null, 0L);
}
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowNullPointerExceptionOnFetchNullKey(final boolean hasIndex) {
    setUp(hasIndex);
    // Fetching with a null key must be rejected up front.
    assertThrows(
        NullPointerException.class,
        () -> cachingStore.fetch(null, ofEpochMilli(1L), ofEpochMilli(2L)));
}
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
// A key range whose serialized from-key compares greater than its to-key
// (the built-in int serde does not preserve ordering for negatives, per the
// warning text asserted below) must yield an empty iterator plus a warning
// log — not an exception.
public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey(final boolean hasIndex) {
setUp(hasIndex);
final Bytes keyFrom = Bytes.wrap(new IntegerSerializer().serialize("", -1));
final Bytes keyTo = Bytes.wrap(new IntegerSerializer().serialize("", 1));
try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(TimeOrderedCachingWindowStore.class);
final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = cachingStore.fetch(keyFrom, keyTo, 0L, 10L)) {
assertFalse(iterator.hasNext());
final List<String> messages = appender.getMessages();
// The exact warning wording is pinned by this assertion.
assertThat(
messages,
hasItem("Returning empty iterator for fetch with invalid key range: from > to." +
" This may be due to range arguments set in the wrong order, " +
"or serdes that don't preserve ordering when lexicographically comparing the serialized bytes." +
" Note that the built-in numerical serdes do not follow this for negative numbers")
);
}
}
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
// Backward counterpart of the forward negative-from-key test: an inverted
// serialized key range must yield an empty iterator plus a warning log,
// not an exception.
public void shouldNotThrowInvalidBackwardRangeExceptionWithNegativeFromKey(final boolean hasIndex) {
setUp(hasIndex);
final Bytes keyFrom = Bytes.wrap(new IntegerSerializer().serialize("", -1));
final Bytes keyTo = Bytes.wrap(new IntegerSerializer().serialize("", 1));
try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(TimeOrderedCachingWindowStore.class);
final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
cachingStore.backwardFetch(keyFrom, keyTo, Instant.ofEpochMilli(0L), Instant.ofEpochMilli(10L))) {
assertFalse(iterator.hasNext());
final List<String> messages = appender.getMessages();
// The exact warning wording is pinned by this assertion.
assertThat(
messages,
hasItem("Returning empty iterator for fetch with invalid key range: from > to." +
" This may be due to serdes that don't preserve ordering when lexicographically comparing the serialized bytes." +
" Note that the built-in numerical serdes do not follow this for negative numbers")
);
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldCloseCacheAndWrappedStoreAfterErrorDuringCacheFlush(final boolean hasIndex) {
    setUp(hasIndex);
    setUpCloseTests();
    // The first cache flush fails, later calls succeed; close() must
    // propagate the error yet still release the remaining resources
    // (checked by the teardown helper).
    final RuntimeException flushError = new RuntimeException("Simulating an error on flush2");
    doThrow(flushError).doNothing().when(cache).flush(CACHE_NAMESPACE);
    assertThrows(RuntimeException.class, cachingStore::close);
    verifyAndTearDownCloseTests();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldCloseWrappedStoreAfterErrorDuringCacheClose(final boolean hasIndex) {
    setUp(hasIndex);
    setUpCloseTests();
    // The first cache.close() fails; close() must propagate the error yet
    // still close the wrapped store (checked by the teardown helper).
    final RuntimeException closeError = new RuntimeException("Simulating an error on close");
    doThrow(closeError).doNothing().when(cache).close(CACHE_NAMESPACE);
    assertThrows(RuntimeException.class, cachingStore::close);
    verifyAndTearDownCloseTests();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldCloseCacheAfterErrorDuringStateStoreClose(final boolean hasIndex) {
    setUp(hasIndex);
    setUpCloseTests();
    // The wrapped store's close() fails; close() must propagate the error yet
    // still flush and close the cache (checked by the teardown helper).
    final RuntimeException closeError = new RuntimeException("Simulating an error on close");
    doThrow(closeError).doNothing().when(underlyingStore).close();
    assertThrows(RuntimeException.class, cachingStore::close);
    verifyAndTearDownCloseTests();
}
// Replaces the real wrapped store and cache with Mockito mocks so that
// close()-failure paths can be scripted, then wires them into a fresh mock
// processor context and re-initializes the caching store.
private void setUpCloseTests() {
underlyingStore = mock(RocksDBTimeOrderedWindowStore.class);
when(underlyingStore.name()).thenReturn("store-name");
// Report the mocked store as open.
when(underlyingStore.isOpen()).thenReturn(true);
cachingStore = new TimeOrderedCachingWindowStore(underlyingStore, WINDOW_SIZE, SEGMENT_INTERVAL);
cache = mock(ThreadCache.class);
context = new InternalMockProcessorContext<>(TestUtils.tempDirectory(), null, null, null, cache);
context.setRecordContext(new ProcessorRecordContext(10, 0, 0, TOPIC, new RecordHeaders()));
cachingStore.init(context, cachingStore);
}
// Builds the expected (windowed key, value) pair for a window starting at
// the given timestamp with the fixed test window size.
private static KeyValue<Windowed<Bytes>, byte[]> windowedPair(final String key, final String value, final long timestamp) {
    final TimeWindow window = new TimeWindow(timestamp, timestamp + WINDOW_SIZE);
    return KeyValue.pair(new Windowed<>(bytesKey(key), window), bytesValue(value));
}
// Inserts distinct keys at DEFAULT_TIMESTAMP until the cache's configured
// byte budget is reached; returns how many entries were written.
private int addItemsToCache() {
    int count = 0;
    for (long bytesUsed = 0; bytesUsed < MAX_CACHE_SIZE_BYTES; count++) {
        final String kv = String.valueOf(count);
        cachingStore.put(bytesKey(kv), bytesValue(kv), DEFAULT_TIMESTAMP);
        bytesUsed += memoryCacheEntrySize(kv.getBytes(), kv.getBytes(), TOPIC)
            + 8  // timestamp
            + 4; // sequenceNumber
    }
    return count;
}
// Shared post-conditions for the close()-failure tests: regardless of which
// step threw, the wrapped store must have been closed and the cache flushed
// and closed for this store's namespace.
private void verifyAndTearDownCloseTests() {
verify(underlyingStore).close();
verify(cache).flush(CACHE_NAMESPACE);
verify(cache).close(CACHE_NAMESPACE);
}
}
| TimeOrderedCachingPersistentWindowStoreTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java | {
"start": 3660,
"end": 34787
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
// Create the mock AM fixture and expose it to the test web app via named
// bindings so the web-service resources can inject it.
// NOTE(review): MockAppContext(0, 1, 2, 1) — argument meaning not visible
// here; presumably app-id/jobs/tasks/attempts, confirm against MockAppContext.
appContext = new MockAppContext(0, 1, 2, 1);
App app = new App(appContext);
bind(appContext).to(AppContext.class).named("am");
bind(app).to(App.class).named("app");
bind(CONF).to(Configuration.class).named("conf");
// Plain mocks: the servlet request/response are bound but never stubbed or
// inspected by these tests.
final HttpServletResponse response = mock(HttpServletResponse.class);
final HttpServletRequest request = mock(HttpServletRequest.class);
bind(response).to(HttpServletResponse.class);
bind(request).to(HttpServletRequest.class);
}
}
// Intentionally empty: all fixtures are created in the binder's configure().
public TestAMWebServicesTasks() {
}
@Test
public void testTasks() throws JSONException, Exception {
    // GET .../jobs/{jobid}/tasks with an explicit JSON Accept header.
    final WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        final Response response = target
            .path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks")
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        final JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        final JSONArray taskArray = json.getJSONObject("tasks").getJSONArray("task");
        assertEquals(2, taskArray.length(), "incorrect number of elements");
        verifyAMTask(taskArray, entry.getValue(), null);
    }
}
@Test
public void testTasksDefault() throws JSONException, Exception {
    // Same listing as testTasks, but with no Accept header: the service
    // must still default to JSON.
    final WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        final Response response = target
            .path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks")
            .request()
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        final JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        final JSONArray taskArray = json.getJSONObject("tasks").getJSONArray("task");
        assertEquals(2, taskArray.length(), "incorrect number of elements");
        verifyAMTask(taskArray, entry.getValue(), null);
    }
}
@Test
public void testTasksSlash() throws JSONException, Exception {
    // Same listing as testTasks, but with a trailing slash on the path.
    final WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        final Response response = target
            .path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks/")
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        final JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        final JSONArray taskArray = json.getJSONObject("tasks").getJSONArray("task");
        assertEquals(2, taskArray.length(), "incorrect number of elements");
        verifyAMTask(taskArray, entry.getValue(), null);
    }
}
@Test
// Same listing as testTasks but negotiated as XML; the payload is parsed
// with a secure DocumentBuilder and verified element by element.
public void testTasksXML() throws JSONException, Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
Response response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks")
.request(MediaType.APPLICATION_XML).get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
// Secure factory: hardened against XXE and similar parser attacks.
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList tasks = dom.getElementsByTagName("tasks");
assertEquals(1, tasks.getLength(), "incorrect number of elements");
NodeList task = dom.getElementsByTagName("task");
verifyAMTaskXML(task, jobsMap.get(id));
}
}
@Test
public void testTasksQueryMap() throws JSONException, Exception {
    // Filter the task listing down to map tasks via ?type=m.
    final WebTarget target = targetWithJsonObject();
    final String taskType = "m";
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        final Response response = target
            .path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks")
            .queryParam("type", taskType)
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        final JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        // A single result is serialized as a bare object; wrap it in an
        // array so the shared verifier can be reused.
        final JSONArray wrapped = new JSONArray();
        wrapped.put(json.getJSONObject("tasks").getJSONObject("task"));
        assertEquals(1, wrapped.length(), "incorrect number of elements");
        verifyAMTask(wrapped, entry.getValue(), taskType);
    }
}
@Test
public void testTasksQueryReduce() throws JSONException, Exception {
    // Filter the task listing down to reduce tasks via ?type=r.
    final WebTarget target = targetWithJsonObject();
    final String taskType = "r";
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        final Response response = target
            .path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks")
            .queryParam("type", taskType)
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        final JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        // A single result is serialized as a bare object; wrap it in an
        // array so the shared verifier can be reused.
        final JSONArray wrapped = new JSONArray();
        wrapped.put(json.getJSONObject("tasks").getJSONObject("task"));
        assertEquals(1, wrapped.length(), "incorrect number of elements");
        verifyAMTask(wrapped, entry.getValue(), taskType);
    }
}
@Test
// The "type" query parameter accepts only "m" or "r"; anything else must be
// answered with 400 BAD_REQUEST carrying a RemoteException JSON body.
public void testTasksQueryInvalid() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
// tasktype must be exactly either "m" or "r"
String tasktype = "reduce";
try {
Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
.path("tasks").queryParam("type", tasktype)
.request(MediaType.APPLICATION_JSON).get();
// get() reports the error via the Response status only; convert it into
// the exception whose payload the catch block asserts on.
throw new BadRequestException(response);
} catch (BadRequestException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
WebServicesTestUtils.checkStringMatch("exception message",
"tasktype must be either m or r", message);
WebServicesTestUtils.checkStringMatch("exception type",
"BadRequestException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.BadRequestException", classname);
}
}
}
@Test
public void testTaskId() throws JSONException, Exception {
    // GET .../tasks/{taskid} for every task of every job, as JSON.
    final WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        for (Task task : entry.getValue().getTasks().values()) {
            final String tid = MRApps.toString(task.getID());
            final Response response = target
                .path("ws").path("v1").path("mapreduce")
                .path("jobs").path(jobId).path("tasks").path(tid)
                .request(MediaType.APPLICATION_JSON)
                .get(Response.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            final JSONObject json = response.readEntity(JSONObject.class);
            assertEquals(1, json.length(), "incorrect number of elements");
            verifyAMSingleTask(json.getJSONObject("task"), task);
        }
    }
}
@Test
public void testTaskIdSlash() throws JSONException, Exception {
    // Same as testTaskId, but with a trailing slash on the task path.
    final WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        for (Task task : entry.getValue().getTasks().values()) {
            final String tid = MRApps.toString(task.getID());
            final Response response = target
                .path("ws").path("v1").path("mapreduce")
                .path("jobs").path(jobId).path("tasks").path(tid + "/")
                .request(MediaType.APPLICATION_JSON)
                .get(Response.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            final JSONObject json = response.readEntity(JSONObject.class);
            assertEquals(1, json.length(), "incorrect number of elements");
            verifyAMSingleTask(json.getJSONObject("task"), task);
        }
    }
}
@Test
public void testTaskIdDefault() throws JSONException, Exception {
    // Same as testTaskId, but with no Accept header: JSON must be the default.
    final WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        final String jobId = MRApps.toString(entry.getKey());
        for (Task task : entry.getValue().getTasks().values()) {
            final String tid = MRApps.toString(task.getID());
            final Response response = target
                .path("ws").path("v1").path("mapreduce")
                .path("jobs").path(jobId).path("tasks").path(tid)
                .request()
                .get(Response.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            final JSONObject json = response.readEntity(JSONObject.class);
            assertEquals(1, json.length(), "incorrect number of elements");
            verifyAMSingleTask(json.getJSONObject("task"), task);
        }
    }
}
@Test
// A task id that does not match TASK_ID_REGEX at all must be answered with
// 404 and a RemoteException body describing the parse failure.
public void testTaskIdBogus() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String tid = "bogustaskid";
        try {
            Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
                .path("tasks").path(tid).request().get();
            throw new NotFoundException(response);
        } catch (NotFoundException ue) {
            // FIX: assert on the response captured in the exception instead of
            // re-issuing the HTTP request; the old code queried the server a
            // second time, unlike every sibling test, which uses ue.getResponse().
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            // region=0,11 reflects the 11-character input "bogustaskid".
            WebServicesTestUtils.checkStringEqual("exception message",
                "TaskId string : " +
                "bogustaskid is not properly formed"
                + "\nReason: java.util.regex.Matcher[pattern=" +
                TaskID.TASK_ID_REGEX + " region=0,11 lastmatch=]", message);
            WebServicesTestUtils.checkStringMatch("exception type",
                "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}
@Test
// A well-formed task id that belongs to no job must be answered with 404 and
// a "task not found" RemoteException body.
public void testTaskIdNonExist() throws JSONException, Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String tid = "task_0_0000_m_000000";
try {
Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
.path("tasks").path(tid).request().get();
// get() reports the error via the Response status only; convert it into
// the exception whose payload the catch block asserts on.
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
// Plain target(): read the entity as a String and parse JSON manually.
String entity = response.readEntity(String.class);
JSONObject msg = new JSONObject(entity);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
WebServicesTestUtils.checkStringMatch("exception message",
"task not found with id task_0_0000_m_000000", message);
WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.NotFoundException", classname);
}
}
}
@Test
// "d" is not a valid task-type letter, so the id fails TASK_ID_REGEX and the
// service must answer 404 with a descriptive RemoteException body.
public void testTaskIdInvalid() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String tid = "task_0_0000_d_000000";
try {
Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
.path("tasks").path(tid).request().get();
// get() reports the error via the Response status only; convert it into
// the exception whose payload the catch block asserts on.
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
// region=0,20 reflects the 20-character invalid id.
WebServicesTestUtils.checkStringEqual("exception message",
"TaskId string : "
+ "task_0_0000_d_000000 is not properly formed"
+ "\nReason: java.util.regex.Matcher[pattern=" +
TaskID.TASK_ID_REGEX + " region=0,20 lastmatch=]", message);
WebServicesTestUtils.checkStringMatch("exception type",
"NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.NotFoundException", classname);
}
}
}
@Test
// An id with too few segments fails TASK_ID_REGEX and must be answered with
// 404 plus a descriptive RemoteException body.
public void testTaskIdInvalid2() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String tid = "task_0_m_000000";
try {
Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
.path("tasks").path(tid).request().get();
// get() reports the error via the Response status only; convert it into
// the exception whose payload the catch block asserts on.
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
// region=0,15 reflects the 15-character invalid id.
WebServicesTestUtils.checkStringEqual("exception message",
"TaskId string : "
+ "task_0_m_000000 is not properly formed"
+ "\nReason: java.util.regex.Matcher[pattern=" +
TaskID.TASK_ID_REGEX + " region=0,15 lastmatch=]", message);
WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.NotFoundException", classname);
}
}
}
@Test
// An id truncated before the task number fails TASK_ID_REGEX and must be
// answered with 404 plus a descriptive RemoteException body.
public void testTaskIdInvalid3() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String tid = "task_0_0000_m";
try {
Response response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
.path("tasks").path(tid).request().get();
// get() reports the error via the Response status only; convert it into
// the exception whose payload the catch block asserts on.
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject msg = response.readEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals(3, exception.length(), "incorrect number of elements");
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
// region=0,13 reflects the 13-character invalid id.
WebServicesTestUtils.checkStringEqual("exception message",
"TaskId string : "
+ "task_0_0000_m is not properly formed"
+ "\nReason: java.util.regex.Matcher[pattern=" +
TaskID.TASK_ID_REGEX + " region=0,13 lastmatch=]", message);
WebServicesTestUtils.checkStringMatch("exception type",
"NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"org.apache.hadoop.yarn.webapp.NotFoundException", classname);
}
}
}
@Test
public void testTaskIdXML() throws Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid)
.request(MediaType.APPLICATION_XML).get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("task");
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyAMSingleTaskXML(element, task);
}
}
}
}
public void verifyAMSingleTask(JSONObject info, Task task)
throws JSONException {
assertEquals(9, info.length(), "incorrect number of elements");
verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
info.getString("type"), info.getString("successfulAttempt"),
info.getLong("startTime"), info.getLong("finishTime"),
info.getLong("elapsedTime"), (float) info.getDouble("progress"),
info.getString("status"));
}
public void verifyAMTask(JSONArray arr, Job job, String type)
throws JSONException {
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
boolean found = false;
if (type != null && task.getType() == MRApps.taskType(type)) {
for (int i = 0; i < arr.length(); i++) {
JSONObject info = arr.getJSONObject(i);
if (tid.matches(info.getString("id"))) {
found = true;
verifyAMSingleTask(info, task);
}
}
assertTrue(found, "task with id: " + tid + " not in web service output");
}
}
}
public void verifyTaskGeneric(Task task, String id, String state,
String type, String successfulAttempt, long startTime, long finishTime,
long elapsedTime, float progress, String status) {
TaskId taskid = task.getID();
String tid = MRApps.toString(taskid);
TaskReport report = task.getReport();
WebServicesTestUtils.checkStringMatch("id", tid, id);
WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
type);
WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
.toString(), state);
// not easily checked without duplicating logic, just make sure its here
assertNotNull(successfulAttempt, "successfulAttempt null");
assertEquals(report.getStartTime(), startTime, "startTime wrong");
assertEquals(report.getFinishTime(), finishTime, "finishTime wrong");
assertEquals(finishTime - startTime, elapsedTime, "elapsedTime wrong");
assertEquals(report.getProgress() * 100, progress, 1e-3f, "progress wrong");
assertEquals(report.getStatus(), status, "status wrong");
}
public void verifyAMSingleTaskXML(Element element, Task task) {
verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
WebServicesTestUtils.getXmlString(element, "state"),
WebServicesTestUtils.getXmlString(element, "type"),
WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
WebServicesTestUtils.getXmlLong(element, "startTime"),
WebServicesTestUtils.getXmlLong(element, "finishTime"),
WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
WebServicesTestUtils.getXmlFloat(element, "progress"),
WebServicesTestUtils.getXmlString(element, "status"));
}
public void verifyAMTaskXML(NodeList nodes, Job job) {
assertEquals(2, nodes.getLength(), "incorrect number of elements");
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
boolean found = false;
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
found = true;
verifyAMSingleTaskXML(element, task);
}
}
assertTrue(found, "task with id: " + tid + " not in web service output");
}
}
@Test
public void testTaskIdCounters() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid).path("counters")
.request(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyAMJobTaskCounters(info, task);
}
}
}
@Test
public void testTaskIdCountersSlash() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid).path("counters/")
.request(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyAMJobTaskCounters(info, task);
}
}
}
@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid).path("counters").request()
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyAMJobTaskCounters(info, task);
}
}
}
@Test
public void testJobTaskCountersXML() throws Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid).path("counters")
.request(MediaType.APPLICATION_XML).get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList info = dom.getElementsByTagName("jobTaskCounters");
verifyAMTaskCountersXML(info, task);
}
}
}
public void verifyAMJobTaskCounters(JSONObject info, Task task)
throws JSONException {
assertEquals(2, info.length(), "incorrect number of elements");
WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
info.getString("id"));
// just do simple verification of fields - not data is correct
// in the fields
JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
for (int i = 0; i < counterGroups.length(); i++) {
JSONObject counterGroup = counterGroups.getJSONObject(i);
String name = counterGroup.getString("counterGroupName");
assertTrue((name != null && !name.isEmpty()), "name not set");
JSONArray counters = counterGroup.getJSONArray("counter");
for (int j = 0; j < counters.length(); j++) {
JSONObject counter = counters.getJSONObject(j);
String counterName = counter.getString("name");
assertTrue((counterName != null && !counterName.isEmpty()), "name not set");
long value = counter.getLong("value");
assertTrue(value >= 0, "value >= 0");
}
}
}
public void verifyAMTaskCountersXML(NodeList nodes, Task task) {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
WebServicesTestUtils.checkStringMatch("id",
MRApps.toString(task.getID()),
WebServicesTestUtils.getXmlString(element, "id"));
// just do simple verification of fields - not data is correct
// in the fields
NodeList groups = element.getElementsByTagName("taskCounterGroup");
for (int j = 0; j < groups.getLength(); j++) {
Element counters = (Element) groups.item(j);
assertNotNull(counters, "should have counters in the web service info");
String name = WebServicesTestUtils.getXmlString(counters,
"counterGroupName");
assertTrue((name != null && !name.isEmpty()), "name not set");
NodeList counterArr = counters.getElementsByTagName("counter");
for (int z = 0; z < counterArr.getLength(); z++) {
Element counter = (Element) counterArr.item(z);
String counterName = WebServicesTestUtils.getXmlString(counter,
"name");
assertTrue((counterName != null && !counterName.isEmpty()), "counter name not set");
long value = WebServicesTestUtils.getXmlLong(counter, "value");
assertTrue(value >= 0, "value not >= 0");
}
}
}
}
}
| JerseyBinder |
java | apache__camel | components/camel-test/camel-test-main-junit5/src/main/java/org/apache/camel/test/main/junit5/CamelMainContext.java | {
"start": 6880,
"end": 7851
} | class
____.getShutdownStrategy().setTimeout(getOuterClassAnnotation().shutdownTimeout());
}
/**
* Inject all the Camel related object instances into the given test instance.
*/
private void initInstance(CamelBeanPostProcessor beanPostProcessor, Object instance) throws Exception {
final Class<?> requiredTestClass = instance.getClass();
beanPostProcessor.postProcessBeforeInitialization(instance, requiredTestClass.getName());
beanPostProcessor.postProcessAfterInitialization(instance, requiredTestClass.getName());
}
/**
* Mock the endpoints corresponding to the patterns provided by {@link CamelMainTest#mockEndpoints()} and
* {@link CamelMainTest#mockEndpointsAndSkip()} if any.
* <p/>
* {@code @Nested} test classes can configure patterns to mock endpoints. The value of those attributes set on
* the innermost | context |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/CommonNestedTest.java | {
"start": 305,
"end": 599
} | class ____ {
@Test
public void testProfileFromNested() {
RestAssured.when()
.get("/greeting/Stu")
.then()
.statusCode(200)
.body(is(defaultProfile() + " Stu"));
}
}
}
| NestedTests |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/WatsonLanguageEndpointBuilderFactory.java | {
"start": 12388,
"end": 14698
} | interface ____ {
/**
* IBM Watson Language (camel-ibm-watson-language)
* Perform natural language processing using IBM Watson Natural Language
* Understanding
*
* Category: ai,cloud
* Since: 4.16
* Maven coordinates: org.apache.camel:camel-ibm-watson-language
*
* @return the dsl builder for the headers' name.
*/
default WatsonLanguageHeaderNameBuilder ibmWatsonLanguage() {
return WatsonLanguageHeaderNameBuilder.INSTANCE;
}
/**
* IBM Watson Language (camel-ibm-watson-language)
* Perform natural language processing using IBM Watson Natural Language
* Understanding
*
* Category: ai,cloud
* Since: 4.16
* Maven coordinates: org.apache.camel:camel-ibm-watson-language
*
* Syntax: <code>ibm-watson-language:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param path label
* @return the dsl builder
*/
default WatsonLanguageEndpointBuilder ibmWatsonLanguage(String path) {
return WatsonLanguageEndpointBuilderFactory.endpointBuilder("ibm-watson-language", path);
}
/**
* IBM Watson Language (camel-ibm-watson-language)
* Perform natural language processing using IBM Watson Natural Language
* Understanding
*
* Category: ai,cloud
* Since: 4.16
* Maven coordinates: org.apache.camel:camel-ibm-watson-language
*
* Syntax: <code>ibm-watson-language:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path label
* @return the dsl builder
*/
default WatsonLanguageEndpointBuilder ibmWatsonLanguage(String componentName, String path) {
return WatsonLanguageEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the IBM Watson Language component.
*/
public static | WatsonLanguageBuilders |
java | google__guice | extensions/dagger-adapter/test/com/google/inject/daggeradapter/DaggerAdapterTest.java | {
"start": 2099,
"end": 2491
} | class ____ extends AbstractModule {
@Provides
String aString(Integer i) {
return i.toString();
}
}
public void testInteractionWithGuiceModules() {
Injector i =
Guice.createInjector(new SimpleGuiceModule(), DaggerAdapter.from(new SimpleDaggerModule()));
assertThat(i.getInstance(String.class)).isEqualTo("1");
}
@dagger.Module
static | SimpleGuiceModule |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java | {
"start": 11040,
"end": 11443
} | interface ____ {
StateTransitionManager create(
StateTransitionManager.Context context,
Supplier<Temporal> clock,
Duration cooldownTimeout,
Duration resourceStabilizationTimeout,
Duration maximumDelayForTrigger);
}
/**
* Consolidated settings for the adaptive scheduler. This | StateTransitionManagerFactory |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsonschema/FormatVisitor5393Test.java | {
"start": 505,
"end": 2581
} | class ____
{
@JsonIgnore
public String ignoredProp;
public String normalProperty;
@JsonProperty("renamedProperty")
public String someProperty;
// [databind#5393]
@JsonAnyGetter
public Map<String, Object> anyProperties() {
return new TreeMap<>();
}
}
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#5393]: regression wrt `@JsonAnyGetter`
@Test
public void ignoreExplicitlyIgnoredAndAnyGetter() throws Exception {
final TreeSet<String> expected = new TreeSet<>();
expected.add("normalProperty");
expected.add("renamedProperty");
final Set<String> actual = new TreeSet<>();
MAPPER.acceptJsonFormatVisitor(TestJsonIgnoredProperties.class,
new JsonFormatVisitorWrapper.Base() {
@Override
public JsonObjectFormatVisitor expectObjectFormat(JavaType type) {
return new JsonObjectFormatVisitor.Base() {
@Override
public void property(BeanProperty prop) {
actual.add(prop.getName());
}
@Override
public void property(String name, JsonFormatVisitable handler, JavaType propertyTypeHint) {
actual.add(name);
}
@Override
public void optionalProperty(BeanProperty prop) {
actual.add(prop.getName());
}
@Override
public void optionalProperty(String name, JsonFormatVisitable handler, JavaType propertyTypeHint) {
actual.add(name);
}
};
}
});
assertEquals(expected, actual);
}
}
| TestJsonIgnoredProperties |
java | netty__netty | transport-classes-epoll/src/main/java/io/netty/channel/epoll/EpollEventLoopGroup.java | {
"start": 1774,
"end": 1912
} | class ____ be loaded and only do the check when its actually
// instanced.
{
// Ensure JNI is initialized by the time this | can |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RScript.java | {
"start": 1162,
"end": 6716
} | enum ____ {
BOOLEAN(RedisCommands.EVAL_BOOLEAN_SAFE),
INTEGER(RedisCommands.EVAL_LONG),
MULTI(RedisCommands.EVAL_LIST),
STATUS(RedisCommands.EVAL_STRING),
VALUE(RedisCommands.EVAL_OBJECT),
MAPVALUE(RedisCommands.EVAL_MAP_VALUE),
MAPVALUELIST(RedisCommands.EVAL_MAP_VALUE_LIST);
private final RedisCommand<?> command;
ReturnType(RedisCommand<?> command) {
this.command = command;
}
public RedisCommand<?> getCommand() {
return command;
}
};
/**
* Executes Lua script stored in Redis scripts cache by SHA-1 digest
*
* @param <R> - type of result
* @param mode - execution mode
* @param shaDigest - SHA-1 digest
* @param returnType - return type
* @param keys - keys available through KEYS param in script
* @param values - values available through ARGV param in script
* @return result object
*/
<R> R evalSha(Mode mode, String shaDigest, ReturnType returnType, List<Object> keys, Object... values);
/**
* Executes a Lua script stored in Redis scripts cache by SHA-1 digest <code>shaDigest</code>.
* The script is executed over all Redis master or slave nodes in cluster depending on <code>mode</code> value.
* <code>resultMapper</code> function reduces all results from Redis nodes into one.
*
* @param mode - execution mode
* @param shaDigest - SHA-1 digest
* @param returnType - return type
* @param resultMapper - function for reducing multiple results into one
* @param values - values available through ARGV param in script
* @return result object
* @param <R> - type of result
*/
<R> R evalSha(Mode mode, String shaDigest, ReturnType returnType, Function<Collection<R>, R> resultMapper, Object... values);
/**
* Executes Lua script stored in Redis scripts cache by SHA-1 digest
*
* @param <R> - type of result
* @param key - used to locate Redis node in Cluster which stores cached Lua script
* @param mode - execution mode
* @param shaDigest - SHA-1 digest
* @param returnType - return type
* @param keys - keys available through KEYS param in script
* @param values - values available through ARGV param in script
* @return result object
*/
<R> R evalSha(String key, Mode mode, String shaDigest, ReturnType returnType, List<Object> keys, Object... values);
/**
* Executes Lua script stored in Redis scripts cache by SHA-1 digest
*
* @param <R> - type of result
* @param mode - execution mode
* @param shaDigest - SHA-1 digest
* @param returnType - return type
* @return result object
*/
<R> R evalSha(Mode mode, String shaDigest, ReturnType returnType);
/**
* Executes Lua script
*
* @param key - used to locate Redis node in Cluster which stores cached Lua script
* @param mode - execution mode
* @param luaScript - lua script
* @param returnType - return type
* @param keys - keys available through KEYS param in script
* @param values - values available through ARGV param in script
* @return result object
* @param <R> - type of result
*/
<R> R eval(String key, Mode mode, String luaScript, ReturnType returnType, List<Object> keys, Object... values);
/**
* Executes a Lua script.
* The script is executed over all Redis master or slave nodes in cluster depending on <code>mode</code> value.
* <code>resultMapper</code> function reduces all results from Redis nodes into one.
*
* @param mode - execution mode
* @param luaScript - lua script
* @param returnType - return type
* @param resultMapper - function for reducing multiple results into one
* @param values - values available through ARGV param in script
* @return result object
* @param <R> - type of result
*/
<R> R eval(Mode mode, String luaScript, ReturnType returnType, Function<Collection<R>, R> resultMapper, Object... values);
/**
* Executes Lua script
*
* @param <R> - type of result
* @param mode - execution mode
* @param luaScript - lua script
* @param returnType - return type
* @param keys - keys available through KEYS param in script
* @param values - values available through ARGV param in script
* @return result object
*/
<R> R eval(Mode mode, String luaScript, ReturnType returnType, List<Object> keys, Object... values);
/**
* Executes Lua script
*
* @param <R> - type of result
* @param mode - execution mode
* @param luaScript - lua script
* @param returnType - return type
* @return result object
*/
<R> R eval(Mode mode, String luaScript, ReturnType returnType);
/**
* Loads Lua script into Redis scripts cache and returns its SHA-1 digest
*
* @param luaScript - lua script
* @return SHA-1 digest
*/
String scriptLoad(String luaScript);
/**
* Checks for presence Lua scripts in Redis script cache by SHA-1 digest.
*
* @param shaDigests - collection of SHA-1 digests
* @return list of booleans corresponding to collection SHA-1 digests
*/
List<Boolean> scriptExists(String... shaDigests);
/**
* Kills currently executed Lua script
*
*/
void scriptKill();
/**
* Flushes Lua script cache.
*
*/
void scriptFlush();
}
| ReturnType |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/nationalized/MaterializedNClobBindTest.java | {
"start": 4153,
"end": 5182
} | class ____ implements InvocationHandler {
private WrapperOptions wrapperOptions;
PreparedStatementHandler(WrapperOptions wrapperOptions) {
this.wrapperOptions = wrapperOptions;
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
final String methodName = method.getName();
if ( "setNCharacterStream".equals( methodName ) ) {
if ( wrapperOptions.useStreamForLobBinding() ) {
return null;
}
else {
throw new IllegalStateException( "PreparedStatement#setNCharacterStream unexpectedly called" );
}
}
else if ( "setNClob".equals( methodName ) ) {
if ( !wrapperOptions.useStreamForLobBinding() ) {
return null;
}
else {
throw new IllegalStateException( "PreparedStatement#setNClob unexpectedly called" );
}
}
else if ( "setNString".equals( methodName ) ) {
return null;
}
else {
throw new UnsupportedOperationException( methodName + " is not supported." );
}
}
}
}
| PreparedStatementHandler |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxEventBusMetrics.java | {
"start": 4632,
"end": 6339
} | class ____ {
private final String address;
private final LongAdder count;
private final LongAdder delivered;
private final LongAdder discarded;
Handler(String address) {
if (address == null) {
this.address = null;
this.count = null;
this.delivered = null;
this.discarded = null;
return;
}
this.address = address;
this.count = new LongAdder();
this.delivered = new LongAdder();
this.discarded = new LongAdder();
Gauge.builder("eventBus.handlers", count::longValue)
.description("Number of handlers per address")
.tags(tags.and("address", address))
.register(registry);
Gauge.builder("eventBus.delivered", delivered::longValue)
.description("Number of messages delivered")
.tags(tags.and("address", address))
.register(registry);
Gauge.builder("eventBus.discarded", discarded::longValue)
.description("Number of messages discarded")
.tags(tags.and("address", address))
.register(registry);
}
public Handler increment() {
count.increment();
return this;
}
public boolean decrement() {
count.decrement();
return count.longValue() == 0;
}
public void delivered() {
delivered.increment();
}
public void discarded() {
discarded.increment();
}
}
}
| Handler |
java | google__guice | core/src/com/google/inject/TypeLiteral.java | {
"start": 8280,
"end": 11868
} | interface ____ by, this.
* @since 2.0
*/
public TypeLiteral<?> getSupertype(Class<?> supertype) {
checkArgument(
supertype.isAssignableFrom(rawType), "%s is not a supertype of %s", supertype, this.type);
return resolve(MoreTypes.getGenericSupertype(type, rawType, supertype));
}
/**
* Returns the resolved generic type of {@code field}.
*
* @param field a field defined by this or any superclass.
* @since 2.0
*/
public TypeLiteral<?> getFieldType(Field field) {
checkArgument(
field.getDeclaringClass().isAssignableFrom(rawType),
"%s is not defined by a supertype of %s",
field,
type);
return resolve(field.getGenericType());
}
/**
* Returns the resolved generic parameter types of {@code methodOrConstructor}.
*
* @param methodOrConstructor a method or constructor defined by this or any supertype.
* @since 2.0
*/
public List<TypeLiteral<?>> getParameterTypes(Member methodOrConstructor) {
Type[] genericParameterTypes;
if (methodOrConstructor instanceof Method) {
Method method = (Method) methodOrConstructor;
checkArgument(
method.getDeclaringClass().isAssignableFrom(rawType),
"%s is not defined by a supertype of %s",
method,
type);
genericParameterTypes = method.getGenericParameterTypes();
} else if (methodOrConstructor instanceof Constructor) {
Constructor<?> constructor = (Constructor<?>) methodOrConstructor;
checkArgument(
constructor.getDeclaringClass().isAssignableFrom(rawType),
"%s does not construct a supertype of %s",
constructor,
type);
genericParameterTypes = constructor.getGenericParameterTypes();
} else {
throw new IllegalArgumentException("Not a method or a constructor: " + methodOrConstructor);
}
return resolveAll(genericParameterTypes);
}
/**
* Returns the resolved generic exception types thrown by {@code constructor}.
*
* @param methodOrConstructor a method or constructor defined by this or any supertype.
* @since 2.0
*/
public List<TypeLiteral<?>> getExceptionTypes(Member methodOrConstructor) {
Type[] genericExceptionTypes;
if (methodOrConstructor instanceof Method) {
Method method = (Method) methodOrConstructor;
checkArgument(
method.getDeclaringClass().isAssignableFrom(rawType),
"%s is not defined by a supertype of %s",
method,
type);
genericExceptionTypes = method.getGenericExceptionTypes();
} else if (methodOrConstructor instanceof Constructor) {
Constructor<?> constructor = (Constructor<?>) methodOrConstructor;
checkArgument(
constructor.getDeclaringClass().isAssignableFrom(rawType),
"%s does not construct a supertype of %s",
constructor,
type);
genericExceptionTypes = constructor.getGenericExceptionTypes();
} else {
throw new IllegalArgumentException("Not a method or a constructor: " + methodOrConstructor);
}
return resolveAll(genericExceptionTypes);
}
/**
* Returns the resolved generic return type of {@code method}.
*
* @param method a method defined by this or any supertype.
* @since 2.0
*/
public TypeLiteral<?> getReturnType(Method method) {
checkArgument(
method.getDeclaringClass().isAssignableFrom(rawType),
"%s is not defined by a supertype of %s",
method,
type);
return resolve(method.getGenericReturnType());
}
}
| implemented |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/locks/LockingVisitors.java | {
"start": 19351,
"end": 21548
} | class ____<O> extends LVBuilder<O, ReadWriteLock, Builder<O>> {
/**
* Constructs a new instance.
*/
public Builder() {
// empty
}
@Override
public ReadWriteLockVisitor<O> get() {
return new ReadWriteLockVisitor<>(this);
}
@Override
public Builder<O> setLock(final ReadWriteLock readWriteLock) {
setReadLockSupplier(readWriteLock::readLock);
setWriteLockSupplier(readWriteLock::writeLock);
return super.setLock(readWriteLock);
}
}
/**
* Creates a new builder.
*
* @param <O> the wrapped object type.
* @return a new builder.
* @since 3.18.0
*/
public static <O> Builder<O> builder() {
return new Builder<>();
}
/**
* Constructs a new instance from a builder.
*
* @param builder a builder.
*/
private ReadWriteLockVisitor(final Builder<O> builder) {
super(builder);
}
/**
* Creates a new instance with the given object and lock.
*
* @param object The object to protect. The caller is supposed to drop all references to the locked object.
* @param readWriteLock the lock to use.
* @see LockingVisitors
*/
protected ReadWriteLockVisitor(final O object, final ReadWriteLock readWriteLock) {
super(object, readWriteLock, readWriteLock::readLock, readWriteLock::writeLock);
}
}
/**
* Wraps a {@link ReentrantLock} and object to protect. To access the object, use the methods {@link #acceptReadLocked(FailableConsumer)},
* {@link #acceptWriteLocked(FailableConsumer)}, {@link #applyReadLocked(FailableFunction)}, and {@link #applyWriteLocked(FailableFunction)}. The visitor
* holds the lock while the consumer or function is called.
*
* @param <O> The type of the object to protect.
* @see LockingVisitors#reentrantLockVisitor(Object)
* @since 3.18.0
*/
public static | Builder |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JiraComponentBuilderFactory.java | {
"start": 11598,
"end": 14337
} | class ____
extends AbstractComponentBuilder<JiraComponent>
implements JiraComponentBuilder {
@Override
protected JiraComponent buildConcreteComponent() {
return new JiraComponent();
}
private org.apache.camel.component.jira.JiraConfiguration getOrCreateConfiguration(JiraComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.jira.JiraConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "delay": getOrCreateConfiguration((JiraComponent) component).setDelay((java.lang.Integer) value); return true;
case "jiraUrl": getOrCreateConfiguration((JiraComponent) component).setJiraUrl((java.lang.String) value); return true;
case "bridgeErrorHandler": ((JiraComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((JiraComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((JiraComponent) component).setAutowiredEnabled((boolean) value); return true;
case "configuration": ((JiraComponent) component).setConfiguration((org.apache.camel.component.jira.JiraConfiguration) value); return true;
case "healthCheckConsumerEnabled": ((JiraComponent) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((JiraComponent) component).setHealthCheckProducerEnabled((boolean) value); return true;
case "accessToken": getOrCreateConfiguration((JiraComponent) component).setAccessToken((java.lang.String) value); return true;
case "consumerKey": getOrCreateConfiguration((JiraComponent) component).setConsumerKey((java.lang.String) value); return true;
case "password": getOrCreateConfiguration((JiraComponent) component).setPassword((java.lang.String) value); return true;
case "privateKey": getOrCreateConfiguration((JiraComponent) component).setPrivateKey((java.lang.String) value); return true;
case "username": getOrCreateConfiguration((JiraComponent) component).setUsername((java.lang.String) value); return true;
case "verificationCode": getOrCreateConfiguration((JiraComponent) component).setVerificationCode((java.lang.String) value); return true;
default: return false;
}
}
}
} | JiraComponentBuilderImpl |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java | {
"start": 2721,
"end": 12609
} | class ____ {
{
DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
}
static Logger LOG = NameNode.stateChangeLog;
/** check if DFS can handle corrupted blocks properly */
@Test
public void testFileCorruption() throws Exception {
MiniDFSCluster cluster = null;
DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFileCorruption").
setNumFiles(20).build();
try {
Configuration conf = new HdfsConfiguration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
FileSystem fs = cluster.getFileSystem();
util.createFiles(fs, "/srcdat");
// Now deliberately remove the blocks
String bpid = cluster.getNamesystem().getBlockPoolId();
DataNode dn = cluster.getDataNodes().get(2);
Map<DatanodeStorage, BlockListAsLongs> blockReports =
dn.getFSDataset().getBlockReports(bpid);
assertTrue(!blockReports.isEmpty(), "Blocks do not exist on data-dir");
for (BlockListAsLongs report : blockReports.values()) {
for (BlockReportReplica brr : report) {
LOG.info("Deliberately removing block {}", brr.getBlockName());
cluster.getFsDatasetTestUtils(2).getMaterializedReplica(
new ExtendedBlock(bpid, brr)).deleteData();
}
}
assertTrue(
util.checkFiles(fs, "/srcdat"), "Corrupted replicas not handled properly.");
util.cleanup(fs, "/srcdat");
} finally {
if (cluster != null) { cluster.shutdown(); }
}
}
/** check if local FS can handle corrupted blocks properly */
@Test
public void testLocalFileCorruption() throws Exception {
Configuration conf = new HdfsConfiguration();
Path file = new Path(PathUtils.getTestDirName(getClass()), "corruptFile");
FileSystem fs = FileSystem.getLocal(conf);
DataOutputStream dos = fs.create(file);
dos.writeBytes("original bytes");
dos.close();
// Now deliberately corrupt the file
dos = new DataOutputStream(new FileOutputStream(file.toString()));
dos.writeBytes("corruption");
dos.close();
// Now attempt to read the file
DataInputStream dis = fs.open(file, 512);
try {
LOG.info("A ChecksumException is expected to be logged.");
dis.readByte();
} catch (ChecksumException ignore) {
//expect this exception but let any NPE get thrown
}
fs.delete(file, true);
}
/** Test the case that a replica is reported corrupt while it is not
* in blocksMap. Make sure that ArrayIndexOutOfBounds does not thrown.
* See Hadoop-4351.
*/
@Test
public void testArrayOutOfBoundsException() throws Exception {
MiniDFSCluster cluster = null;
try {
Configuration conf = new HdfsConfiguration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
cluster.waitActive();
FileSystem fs = cluster.getFileSystem();
final Path FILE_PATH = new Path("/tmp.txt");
final long FILE_LEN = 1L;
DFSTestUtil.createFile(fs, FILE_PATH, FILE_LEN, (short)2, 1L);
// get the block
final String bpid = cluster.getNamesystem().getBlockPoolId();
ExtendedBlock blk = getFirstBlock(cluster.getDataNodes().get(0), bpid);
assertFalse(blk==null, "Data directory does not contain any blocks or there was an "
+ "IO error");
// start a third datanode
cluster.startDataNodes(conf, 1, true, null, null);
ArrayList<DataNode> datanodes = cluster.getDataNodes();
assertEquals(datanodes.size(), 3);
DataNode dataNode = datanodes.get(2);
// report corrupted block by the third datanode
DatanodeRegistration dnR = InternalDataNodeTestUtils.
getDNRegistrationForBP(dataNode, blk.getBlockPoolId());
FSNamesystem ns = cluster.getNamesystem();
ns.writeLock(RwLockMode.BM);
try {
cluster.getNamesystem().getBlockManager().findAndMarkBlockAsCorrupt(blk,
new DatanodeInfoBuilder().setNodeID(dnR).build(), "TEST",
"STORAGE_ID");
} finally {
ns.writeUnlock(RwLockMode.BM, "testArrayOutOfBoundsException");
}
// open the file
fs.open(FILE_PATH);
//clean up
fs.delete(FILE_PATH, false);
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
@Test
public void testCorruptionWithDiskFailure() throws Exception {
MiniDFSCluster cluster = null;
try {
Configuration conf = new HdfsConfiguration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
cluster.waitActive();
BlockManager bm = cluster.getNamesystem().getBlockManager();
FileSystem fs = cluster.getFileSystem();
final Path FILE_PATH = new Path("/tmp.txt");
final long FILE_LEN = 1L;
DFSTestUtil.createFile(fs, FILE_PATH, FILE_LEN, (short) 3, 1L);
// get the block
final String bpid = cluster.getNamesystem().getBlockPoolId();
File storageDir = cluster.getInstanceStorageDir(0, 0);
File dataDir = MiniDFSCluster.getFinalizedDir(storageDir, bpid);
assertTrue(dataDir.exists(), "Data directory does not exist");
ExtendedBlock blk = getFirstBlock(cluster.getDataNodes().get(0), bpid);
if (blk == null) {
blk = getFirstBlock(cluster.getDataNodes().get(0), bpid);
}
assertFalse(blk == null, "Data directory does not contain any blocks or there was an" +
" " +
"IO error");
ArrayList<DataNode> datanodes = cluster.getDataNodes();
assertEquals(datanodes.size(), 3);
FSNamesystem ns = cluster.getNamesystem();
//fail the storage on that node which has the block
try {
ns.writeLock(RwLockMode.BM);
updateAllStorages(bm);
} finally {
ns.writeUnlock(RwLockMode.BM, "testCorruptionWithDiskFailure");
}
ns.writeLock(RwLockMode.BM);
try {
markAllBlocksAsCorrupt(bm, blk);
} finally {
ns.writeUnlock(RwLockMode.BM, "testCorruptionWithDiskFailure");
}
// open the file
fs.open(FILE_PATH);
//clean up
fs.delete(FILE_PATH, false);
} finally {
if (cluster != null) { cluster.shutdown(); }
}
}
@Test
public void testSetReplicationWhenBatchIBR() throws Exception {
Configuration conf = new HdfsConfiguration();
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 100);
conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INCREMENTAL_INTERVAL_MSEC_KEY,
30000);
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_FILE_CLOSE_NUM_COMMITTED_ALLOWED_KEY,
1);
DistributedFileSystem dfs;
try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(3).build()) {
final int bufferSize = 1024; // 1024 Bytes each time
byte[] outBuffer = new byte[bufferSize];
dfs = cluster.getFileSystem();
String fileName = "/testSetRep1";
Path filePath = new Path(fileName);
FSDataOutputStream out = dfs.create(filePath);
out.write(outBuffer, 0, bufferSize);
out.close();
//sending the FBR to Delay next IBR
cluster.triggerBlockReports();
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
try {
cluster.triggerBlockReports();
if (cluster.getNamesystem().getBlocksTotal() == 1) {
return true;
}
} catch (Exception e) {
// Ignore the exception
}
return false;
}
}, 10, 3000);
fileName = "/testSetRep2";
filePath = new Path(fileName);
out = dfs.create(filePath);
out.write(outBuffer, 0, bufferSize);
out.close();
dfs.setReplication(filePath, (short) 10);
cluster.triggerBlockReports();
// underreplicated Blocks should be one after setrep
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override public Boolean get() {
try {
return cluster.getNamesystem().getBlockManager()
.getLowRedundancyBlocksCount() == 1;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
}, 10, 3000);
assertEquals(0,
cluster.getNamesystem().getBlockManager().getMissingBlocksCount());
}
}
private void markAllBlocksAsCorrupt(BlockManager bm,
ExtendedBlock blk) throws IOException {
for (DatanodeStorageInfo info : bm.getStorages(blk.getLocalBlock())) {
bm.findAndMarkBlockAsCorrupt(
blk, info.getDatanodeDescriptor(), info.getStorageID(), "STORAGE_ID");
}
}
private void updateAllStorages(BlockManager bm) {
for (DatanodeDescriptor dd : bm.getDatanodeManager().getDatanodes()) {
Set<DatanodeStorageInfo> setInfos = new HashSet<DatanodeStorageInfo>();
DatanodeStorageInfo[] infos = dd.getStorageInfos();
Random random = new Random();
for (int i = 0; i < infos.length; i++) {
int blkId = random.nextInt(101);
DatanodeStorage storage = new DatanodeStorage(Integer.toString(blkId),
DatanodeStorage.State.FAILED, StorageType.DISK);
infos[i].updateFromStorage(storage);
setInfos.add(infos[i]);
}
}
}
private static ExtendedBlock getFirstBlock(DataNode dn, String bpid) {
Map<DatanodeStorage, BlockListAsLongs> blockReports =
dn.getFSDataset().getBlockReports(bpid);
for (BlockListAsLongs blockLongs : blockReports.values()) {
for (BlockReportReplica block : blockLongs) {
return new ExtendedBlock(bpid, block);
}
}
return null;
}
}
| TestFileCorruption |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/CheckReturnValue.java | {
"start": 18283,
"end": 19652
} | enum ____ {
NONE,
API_ERASED_SIGNATURE,
}
/** Returns a fix that adds {@code @CanIgnoreReturnValue} to the given symbol, if possible. */
private static Fix fixAtDeclarationSite(MethodSymbol symbol, VisitorState state) {
MethodTree method = findDeclaration(symbol, state);
if (method == null || isGeneratedConstructor(method)) {
return emptyFix();
}
SuggestedFix.Builder fix = SuggestedFix.builder();
fix.prefixWith(
method, "@" + qualifyType(state, fix, CanIgnoreReturnValue.class.getName()) + " ");
getAnnotationsWithSimpleName(method.getModifiers().getAnnotations(), CHECK_RETURN_VALUE)
.forEach(fix::delete);
fix.setShortDescription("Annotate the method with @CanIgnoreReturnValue");
return fix.build();
}
private static @Nullable MethodTree findDeclaration(Symbol symbol, VisitorState state) {
JavacProcessingEnvironment javacEnv = JavacProcessingEnvironment.instance(state.context);
TreePath declPath = Trees.instance(javacEnv).getPath(symbol);
// Skip fields declared in other compilation units since we can't make a fix for them here.
if (declPath != null
&& declPath.getCompilationUnit() == state.getPath().getCompilationUnit()
&& declPath.getLeaf() instanceof MethodTree methodTree) {
return methodTree;
}
return null;
}
}
| MessageTrailerStyle |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/validation/AbstractDecoratorWithAbstractMethodTest.java | {
"start": 964,
"end": 1081
} | interface ____<T, U> {
T convert(T value);
}
@Priority(1)
@Decorator
static abstract | Converter |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java | {
"start": 1224,
"end": 1525
} | class ____ {
public static final String NAME = "cluster:admin/xpack/connector/sync_job/delete";
public static final ActionType<AcknowledgedResponse> INSTANCE = new ActionType<>(NAME);
private DeleteConnectorSyncJobAction() {/* no instances */}
public static | DeleteConnectorSyncJobAction |
java | grpc__grpc-java | grpclb/src/generated/main/grpc/io/grpc/lb/v1/LoadBalancerGrpc.java | {
"start": 11212,
"end": 12342
} | class ____
extends LoadBalancerBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
LoadBalancerMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (LoadBalancerGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new LoadBalancerFileDescriptorSupplier())
.addMethod(getBalanceLoadMethod())
.build();
}
}
}
return result;
}
}
| LoadBalancerMethodDescriptorSupplier |
java | junit-team__junit5 | junit-platform-reporting/src/main/java/org/junit/platform/reporting/open/xml/LegacyReportingName.java | {
"start": 581,
"end": 874
} | class ____ extends ChildElement<Metadata, LegacyReportingName> {
static final QualifiedName ELEMENT = QualifiedName.of(JUnitFactory.NAMESPACE, "legacyReportingName");
LegacyReportingName(Context context, String value) {
super(context, ELEMENT);
withContent(value);
}
}
| LegacyReportingName |
java | elastic__elasticsearch | modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedDocument.java | {
"start": 1330,
"end": 4635
} | class ____ implements Writeable, ToXContentObject {
static final ParseField RATING_FIELD = new ParseField("rating");
static final ParseField DOC_ID_FIELD = new ParseField("_id");
static final ParseField INDEX_FIELD = new ParseField("_index");
private static final ConstructingObjectParser<RatedDocument, Void> PARSER = new ConstructingObjectParser<>(
"rated_document",
a -> new RatedDocument((String) a[0], (String) a[1], (Integer) a[2])
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), INDEX_FIELD);
PARSER.declareString(ConstructingObjectParser.constructorArg(), DOC_ID_FIELD);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), RATING_FIELD);
}
private final int rating;
private final DocumentKey key;
public RatedDocument(String index, String id, int rating) {
this.key = new DocumentKey(index, id);
this.rating = rating;
}
RatedDocument(StreamInput in) throws IOException {
this.key = new DocumentKey(in.readString(), in.readString());
this.rating = in.readVInt();
}
public DocumentKey getKey() {
return this.key;
}
public String getIndex() {
return key.index();
}
public String getDocID() {
return key.docId();
}
public int getRating() {
return rating;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(key.index());
out.writeString(key.docId());
out.writeVInt(rating);
}
static RatedDocument fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(INDEX_FIELD.getPreferredName(), key.index());
builder.field(DOC_ID_FIELD.getPreferredName(), key.docId());
builder.field(RATING_FIELD.getPreferredName(), rating);
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
RatedDocument other = (RatedDocument) obj;
return Objects.equals(key, other.key) && Objects.equals(rating, other.rating);
}
@Override
public final int hashCode() {
return Objects.hash(key, rating);
}
/**
* a joint document key consisting of the documents index and id
*/
record DocumentKey(String index, String docId) {
DocumentKey {
if (Strings.isNullOrEmpty(index)) {
throw new IllegalArgumentException("Index must be set for each rated document");
}
if (Strings.isNullOrEmpty(docId)) {
throw new IllegalArgumentException("DocId must be set for each rated document");
}
}
@Override
public String toString() {
return "{\"_index\":\"" + index + "\",\"_id\":\"" + docId + "\"}";
}
}
}
| RatedDocument |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/ResolvedCatalogTable.java | {
"start": 1413,
"end": 1693
} | class ____ serializable for
* persistence if and only if the originating {@link CatalogTable} implements {@link
* CatalogTable#getOptions()}. Catalog implementations are encouraged to use {@link
* ResolvedCatalogTable#toProperties(SqlFactory)}.
*/
@PublicEvolving
public final | are |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/boot/conditional2/JavaConfigAnnotationReferenceBeanConditionalTest.java | {
"start": 2175,
"end": 2958
} | class ____ {
@BeforeAll
public static void beforeAll() {
DubboBootstrap.reset();
}
@AfterAll
public static void afterAll() {
DubboBootstrap.reset();
}
@Autowired
private HelloService helloService;
@Autowired
private ApplicationContext applicationContext;
@Test
void testConsumer() {
Map<String, HelloService> helloServiceMap = applicationContext.getBeansOfType(HelloService.class);
Assertions.assertEquals(1, helloServiceMap.size());
Assertions.assertNotNull(helloServiceMap.get("helloService"));
Assertions.assertNull(helloServiceMap.get("myHelloService"));
}
@Order(Integer.MAX_VALUE - 2)
@Configuration
public static | JavaConfigAnnotationReferenceBeanConditionalTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/planning/RandomizedAssignmentRounding.java | {
"start": 1615,
"end": 3864
} | class ____ {
private static final Logger logger = LogManager.getLogger(RandomizedAssignmentRounding.class);
private static final double EPS = 1e-6;
private final Random random;
private final int rounds;
private final Collection<Node> nodes;
private final Collection<AssignmentPlan.Deployment> deployments;
private final AssignmentHolder assignmentHolder;
RandomizedAssignmentRounding(Random random, int rounds, Collection<Node> nodes, Collection<AssignmentPlan.Deployment> deployments) {
if (rounds <= 0) {
throw new IllegalArgumentException("rounds must be > 0");
}
this.random = Objects.requireNonNull(random);
this.rounds = rounds;
this.nodes = Objects.requireNonNull(nodes);
this.deployments = Objects.requireNonNull(deployments);
this.assignmentHolder = new AssignmentHolder();
}
AssignmentPlan computePlan(
Map<Tuple<AssignmentPlan.Deployment, Node>, Double> allocationVars,
Map<Tuple<AssignmentPlan.Deployment, Node>, Double> assignmentVars
) {
AssignmentPlan bestPlan = assignmentHolder.toPlan();
assignmentHolder.initializeAssignments(allocationVars, assignmentVars);
assignmentHolder.assignUnderSubscribedNodes();
List<Tuple<AssignmentPlan.Deployment, Node>> softAssignmentQueue = assignmentHolder.createSoftAssignmentQueue();
if (softAssignmentQueue.isEmpty() == false) {
logger.debug(() -> "Random assignment rounding across [" + rounds + "] rounds");
for (int i = 0; i < rounds; i++) {
AssignmentHolder randomizedAssignments = new AssignmentHolder(assignmentHolder);
randomizedAssignments.doRandomizedRounding(softAssignmentQueue);
AssignmentPlan randomizedPlan = randomizedAssignments.toPlan();
if (randomizedPlan.compareTo(bestPlan) > 0) {
bestPlan = randomizedPlan;
}
}
} else {
AssignmentPlan plan = assignmentHolder.toPlan();
if (plan.compareTo(bestPlan) > 0) {
bestPlan = plan;
}
}
return bestPlan;
}
private | RandomizedAssignmentRounding |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/cache/OneToOneCacheEnableSelectingTest.java | {
"start": 3365,
"end": 3983
} | class ____ {
@Id
@GeneratedValue
private Long id;
@Version
private Integer version;
@OneToOne(fetch = FetchType.LAZY, cascade = { CascadeType.PERSIST, CascadeType.MERGE })
private Product product;
public ProductConfig() {}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Integer getVersion() {
return version;
}
public void setVersion(Integer version) {
this.version = version;
}
public Product getProduct() {
return product;
}
public void setProduct(Product product) {
this.product = product;
}
}
}
| ProductConfig |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Issue220.java | {
"start": 137,
"end": 526
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
Attr attr = new Attr();
attr.jTType = 123;
attr.value = "xxxx";
attr.symbol = "yyyy";
String text = JSON.toJSONString(attr);
Assert.assertEquals("{\"jTType\":123,\"symbol\":\"yyyy\",\"value\":\"xxxx\"}", text);
}
public static | Issue220 |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java | {
"start": 1324,
"end": 3113
} | class ____ extends ESTestCase {
public void testVersionComparison() {
TransformConfigVersion V_7_2_0 = TransformConfigVersion.V_7_2_0;
TransformConfigVersion V_8_0_0 = TransformConfigVersion.V_8_0_0;
TransformConfigVersion V_10 = TransformConfigVersion.V_10;
assertThat(V_7_2_0.before(V_8_0_0), is(true));
assertThat(V_7_2_0.before(V_7_2_0), is(false));
assertThat(V_8_0_0.before(V_7_2_0), is(false));
assertThat(V_8_0_0.before(V_10), is(true));
assertThat(V_10.before(V_10), is(false));
assertThat(V_7_2_0.onOrBefore(V_8_0_0), is(true));
assertThat(V_7_2_0.onOrBefore(V_7_2_0), is(true));
assertThat(V_8_0_0.onOrBefore(V_7_2_0), is(false));
assertThat(V_8_0_0.onOrBefore(V_10), is(true));
assertThat(V_10.onOrBefore(V_10), is(true));
assertThat(V_7_2_0.after(V_8_0_0), is(false));
assertThat(V_7_2_0.after(V_7_2_0), is(false));
assertThat(V_8_0_0.after(V_7_2_0), is(true));
assertThat(V_10.after(V_8_0_0), is(true));
assertThat(V_10.after(V_10), is(false));
assertThat(V_7_2_0.onOrAfter(V_8_0_0), is(false));
assertThat(V_7_2_0.onOrAfter(V_7_2_0), is(true));
assertThat(V_8_0_0.onOrAfter(V_7_2_0), is(true));
assertThat(V_10.onOrAfter(V_8_0_0), is(true));
assertThat(V_10.onOrAfter(V_10), is(true));
assertThat(V_7_2_0.onOrAfter(V_10), is(false));
assertThat(V_7_2_0, Matchers.is(lessThan(V_8_0_0)));
assertThat(V_7_2_0.compareTo(V_7_2_0), is(0));
assertThat(V_8_0_0, Matchers.is(greaterThan(V_7_2_0)));
assertThat(V_10, Matchers.is(greaterThan(V_8_0_0)));
assertThat(V_10.compareTo(V_10), is(0));
}
public static | TransformConfigVersionTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RemoveUnusedImportsTest.java | {
"start": 7626,
"end": 7915
} | class ____<T extends Collection> {
public void foo(T t) {}
}
""")
.expectUnchanged()
.addInputLines(
"in/B.java",
"""
import java.util.Collection;
import java.util.List;
public | A |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java | {
"start": 3619,
"end": 12254
} | class ____ {
final static String LOCATION = "LOG_DIR:";
String location;
long start;
long length;
}
private static LogFileDetail getLogFileDetail(TaskAttemptID taskid,
LogName filter,
boolean isCleanup)
throws IOException {
File indexFile = getIndexFile(taskid, isCleanup);
BufferedReader fis = new BufferedReader(new InputStreamReader(
SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null),
StandardCharsets.UTF_8));
//the format of the index file is
//LOG_DIR: <the dir where the task logs are really stored>
//stdout:<start-offset in the stdout file> <length>
//stderr:<start-offset in the stderr file> <length>
//syslog:<start-offset in the syslog file> <length>
LogFileDetail l = new LogFileDetail();
String str = null;
try {
str = fis.readLine();
if (str == null) { // the file doesn't have anything
throw new IOException("Index file for the log of " + taskid
+ " doesn't exist.");
}
l.location = str.substring(str.indexOf(LogFileDetail.LOCATION)
+ LogFileDetail.LOCATION.length());
// special cases are the debugout and profile.out files. They are
// guaranteed
// to be associated with each task attempt since jvm reuse is disabled
// when profiling/debugging is enabled
if (filter.equals(LogName.DEBUGOUT) || filter.equals(LogName.PROFILE)) {
l.length = new File(l.location, filter.toString()).length();
l.start = 0;
fis.close();
return l;
}
str = fis.readLine();
while (str != null) {
// look for the exact line containing the logname
if (str.contains(filter.toString())) {
str = str.substring(filter.toString().length() + 1);
String[] startAndLen = str.split(" ");
l.start = Long.parseLong(startAndLen[0]);
l.length = Long.parseLong(startAndLen[1]);
break;
}
str = fis.readLine();
}
fis.close();
fis = null;
} finally {
IOUtils.cleanupWithLogger(LOG, fis);
}
return l;
}
private static File getTmpIndexFile(TaskAttemptID taskid, boolean isCleanup) {
return new File(getAttemptDir(taskid, isCleanup), "log.tmp");
}
static File getIndexFile(TaskAttemptID taskid, boolean isCleanup) {
return new File(getAttemptDir(taskid, isCleanup), "log.index");
}
/**
* Obtain the owner of the log dir. This is
* determined by checking the job's log directory.
*/
static String obtainLogDirOwner(TaskAttemptID taskid) throws IOException {
Configuration conf = new Configuration();
FileSystem raw = FileSystem.getLocal(conf).getRaw();
Path jobLogDir = new Path(getJobDir(taskid.getJobID()).getAbsolutePath());
FileStatus jobStat = raw.getFileStatus(jobLogDir);
return jobStat.getOwner();
}
static String getBaseLogDir() {
return System.getProperty("hadoop.log.dir");
}
static File getAttemptDir(TaskAttemptID taskid, boolean isCleanup) {
String cleanupSuffix = isCleanup ? ".cleanup" : "";
return new File(getJobDir(taskid.getJobID()), taskid + cleanupSuffix);
}
private static long prevOutLength;
private static long prevErrLength;
private static long prevLogLength;
private static synchronized
void writeToIndexFile(String logLocation,
boolean isCleanup) throws IOException {
// To ensure atomicity of updates to index file, write to temporary index
// file first and then rename.
File tmpIndexFile = getTmpIndexFile(currentTaskid, isCleanup);
BufferedOutputStream bos = null;
DataOutputStream dos = null;
try{
bos = new BufferedOutputStream(
SecureIOUtils.createForWrite(tmpIndexFile, 0644));
dos = new DataOutputStream(bos);
//the format of the index file is
//LOG_DIR: <the dir where the task logs are really stored>
//STDOUT: <start-offset in the stdout file> <length>
//STDERR: <start-offset in the stderr file> <length>
//SYSLOG: <start-offset in the syslog file> <length>
dos.writeBytes(LogFileDetail.LOCATION + logLocation + "\n"
+ LogName.STDOUT.toString() + ":");
dos.writeBytes(Long.toString(prevOutLength) + " ");
dos.writeBytes(Long.toString(new File(logLocation, LogName.STDOUT
.toString()).length() - prevOutLength)
+ "\n" + LogName.STDERR + ":");
dos.writeBytes(Long.toString(prevErrLength) + " ");
dos.writeBytes(Long.toString(new File(logLocation, LogName.STDERR
.toString()).length() - prevErrLength)
+ "\n" + LogName.SYSLOG.toString() + ":");
dos.writeBytes(Long.toString(prevLogLength) + " ");
dos.writeBytes(Long.toString(new File(logLocation, LogName.SYSLOG
.toString()).length() - prevLogLength)
+ "\n");
dos.close();
dos = null;
bos.close();
bos = null;
} finally {
IOUtils.cleanupWithLogger(LOG, dos, bos);
}
File indexFile = getIndexFile(currentTaskid, isCleanup);
Path indexFilePath = new Path(indexFile.getAbsolutePath());
Path tmpIndexFilePath = new Path(tmpIndexFile.getAbsolutePath());
if (localFS == null) {// set localFS once
localFS = FileSystem.getLocal(new Configuration());
}
localFS.rename (tmpIndexFilePath, indexFilePath);
}
private static void resetPrevLengths(String logLocation) {
prevOutLength = new File(logLocation, LogName.STDOUT.toString()).length();
prevErrLength = new File(logLocation, LogName.STDERR.toString()).length();
prevLogLength = new File(logLocation, LogName.SYSLOG.toString()).length();
}
private volatile static TaskAttemptID currentTaskid = null;
@SuppressWarnings("unchecked")
public synchronized static void syncLogs(String logLocation,
TaskAttemptID taskid,
boolean isCleanup)
throws IOException {
System.out.flush();
System.err.flush();
if (currentTaskid != taskid) {
currentTaskid = taskid;
resetPrevLengths(logLocation);
}
writeToIndexFile(logLocation, isCleanup);
}
public static synchronized void syncLogsShutdown(
ScheduledExecutorService scheduler)
{
// flush standard streams
//
System.out.flush();
System.err.flush();
if (scheduler != null) {
scheduler.shutdownNow();
}
// flush & close all appenders
LogManager.shutdown();
}
@SuppressWarnings("unchecked")
public static synchronized void syncLogs() {
// flush standard streams
//
System.out.flush();
System.err.flush();
// flush flushable appenders
//
final Logger rootLogger = Logger.getRootLogger();
flushAppenders(rootLogger);
final Enumeration<Logger> allLoggers = rootLogger.getLoggerRepository().
getCurrentLoggers();
while (allLoggers.hasMoreElements()) {
final Logger l = allLoggers.nextElement();
flushAppenders(l);
}
}
@SuppressWarnings("unchecked")
private static void flushAppenders(Logger l) {
final Enumeration<Appender> allAppenders = l.getAllAppenders();
while (allAppenders.hasMoreElements()) {
final Appender a = allAppenders.nextElement();
if (a instanceof Flushable) {
try {
((Flushable) a).flush();
} catch (IOException ioe) {
System.err.println(a + ": Failed to flush!"
+ StringUtils.stringifyException(ioe));
}
}
}
}
public static ScheduledExecutorService createLogSyncer() {
final ScheduledExecutorService scheduler =
HadoopExecutors.newSingleThreadScheduledExecutor(
new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
final Thread t = Executors.defaultThreadFactory().newThread(r);
t.setDaemon(true);
t.setName("Thread for syncLogs");
return t;
}
});
ShutdownHookManager.get().addShutdownHook(new Runnable() {
@Override
public void run() {
TaskLog.syncLogsShutdown(scheduler);
}
}, 50);
scheduler.scheduleWithFixedDelay(
new Runnable() {
@Override
public void run() {
TaskLog.syncLogs();
}
}, 0L, 5L, TimeUnit.SECONDS);
return scheduler;
}
/**
* The filter for userlogs.
*/
@InterfaceAudience.Private
public | LogFileDetail |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/internals/PartitionStates.java | {
"start": 1126,
"end": 1970
} | class ____ a useful building block for doing fetch requests where topic partitions have to be rotated via
* round-robin to ensure fairness and some level of determinism given the existence of a limit on the fetch response
* size. Because the serialization of fetch requests is more efficient if all partitions for the same topic are grouped
* together, we do such grouping in the method `set`.
*
* As partitions are moved to the end, the same topic may be repeated more than once. In the optimal case, a single
* topic would "wrap around" and appear twice. However, as partitions are fetched in different orders and partition
* leadership changes, we will deviate from the optimal. If this turns out to be an issue in practice, we can improve
* it by tracking the partitions per node or calling `set` every so often.
*
* Note that this | is |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/support/AbstractRegistryFactory.java | {
"start": 1855,
"end": 5163
} | class ____ implements RegistryFactory, ScopeModelAware {
private static final ErrorTypeAwareLogger LOGGER =
LoggerFactory.getErrorTypeAwareLogger(AbstractRegistryFactory.class);
private RegistryManager registryManager;
protected ApplicationModel applicationModel;
@Override
public void setApplicationModel(ApplicationModel applicationModel) {
this.applicationModel = applicationModel;
this.registryManager = applicationModel.getBeanFactory().getBean(RegistryManager.class);
}
@Override
public Registry getRegistry(URL url) {
if (registryManager == null) {
throw new IllegalStateException("Unable to fetch RegistryManager from ApplicationModel BeanFactory. "
+ "Please check if `setApplicationModel` has been override.");
}
Registry defaultNopRegistry = registryManager.getDefaultNopRegistryIfDestroyed();
if (null != defaultNopRegistry) {
return defaultNopRegistry;
}
url = URLBuilder.from(url)
.setPath(RegistryService.class.getName())
.addParameter(INTERFACE_KEY, RegistryService.class.getName())
.removeParameter(TIMESTAMP_KEY)
.removeAttribute(EXPORT_KEY)
.removeAttribute(REFER_KEY)
.build();
String key = createRegistryCacheKey(url);
Registry registry = null;
boolean check = UrlUtils.isCheck(url);
// Lock the registry access process to ensure a single instance of the registry
registryManager.getRegistryLock().lock();
try {
// double check
// fix https://github.com/apache/dubbo/issues/7265.
defaultNopRegistry = registryManager.getDefaultNopRegistryIfDestroyed();
if (null != defaultNopRegistry) {
return defaultNopRegistry;
}
registry = registryManager.getRegistry(key);
if (registry != null) {
return registry;
}
// create registry by spi/ioc
registry = createRegistry(url);
if (check && registry == null) {
throw new IllegalStateException("Can not create registry " + url);
}
if (registry != null) {
registryManager.putRegistry(key, registry);
}
} catch (Exception e) {
if (check) {
throw new RuntimeException("Can not create registry " + url, e);
} else {
// 1-11 Failed to obtain or create registry (service) object.
LOGGER.warn(REGISTRY_FAILED_CREATE_INSTANCE, "", "", "Failed to obtain or create registry ", e);
}
} finally {
// Release the lock
registryManager.getRegistryLock().unlock();
}
return registry;
}
/**
* Create the key for the registries cache.
* This method may be overridden by the sub-class.
*
* @param url the registration {@link URL url}
* @return non-null
*/
protected String createRegistryCacheKey(URL url) {
return url.toServiceStringWithoutResolving();
}
protected abstract Registry createRegistry(URL url);
}
| AbstractRegistryFactory |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/operators/python/scalar/AbstractPythonScalarFunctionOperator.java | {
"start": 1987,
"end": 2879
} | class ____ all stream operators to execute Python {@link ScalarFunction}s. It executes the
* Python {@link ScalarFunction}s in separate Python execution environment.
*
* <p>The inputs are assumed as the following format: {{{ +------------------+--------------+ |
* forwarded fields | extra fields | +------------------+--------------+ }}}.
*
* <p>The Python UDFs may take input columns directly from the input row or the execution result of
* Java UDFs: 1) The input columns from the input row can be referred from the 'forwarded fields';
* 2) The Java UDFs will be computed and the execution results can be referred from the 'extra
* fields'.
*
* <p>The outputs will be as the following format: {{{
* +------------------+-------------------------+ | forwarded fields | scalar function results |
* +------------------+-------------------------+ }}}.
*/
@Internal
public abstract | for |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/HANAAggregateSupport.java | {
"start": 28642,
"end": 29583
} | class ____ extends AggregateXmlWriteExpression
implements WriteExpressionRenderer {
private final String path;
RootXmlWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
super( aggregateColumn, aggregateColumn.getColumnDefinition() );
path = aggregateColumn.getSelectionExpression();
initializeSubExpressions( aggregateColumn, columns );
}
@Override
protected String getTagName() {
return XmlHelper.ROOT_TAG;
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression aggregateColumnWriteExpression,
String qualifier) {
final String basePath;
if ( qualifier == null || qualifier.isBlank() ) {
basePath = path;
}
else {
basePath = qualifier + "." + path;
}
append( sqlAppender, basePath, translator, aggregateColumnWriteExpression );
}
}
private static | RootXmlWriteExpression |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e5/b/DerivedIdentityIdClassParentSameIdTypeEmbeddedIdDepTest.java | {
"start": 730,
"end": 2072
} | class ____ {
@Test
public void testOneToOneExplicitJoinColumn(SessionFactoryScope scope) {
final MetadataImplementor metadata = scope.getMetadataImplementor();
assertTrue( SchemaUtil.isColumnPresent( "MedicalHistory", "FK1", metadata ) );
assertTrue( SchemaUtil.isColumnPresent( "MedicalHistory", "FK2", metadata ) );
assertTrue( !SchemaUtil.isColumnPresent( "MedicalHistory", "firstname", metadata ) );
Person e = new Person();
final String firstName = "Emmanuel";
final String lastName = "Bernard";
e.firstName = firstName;
e.lastName = lastName;
scope.inTransaction(
session -> {
session.persist( e );
MedicalHistory d = new MedicalHistory();
d.patient = e;
session.persist( d );
session.flush();
session.clear();
d = session.get( MedicalHistory.class, d.id );
assertEquals( d.id.firstName, d.patient.firstName );
}
);
scope.inTransaction(
session -> {
PersonId pId = new PersonId( firstName, lastName );
MedicalHistory d2 = session.get( MedicalHistory.class, pId );
Person p2 = session.get( Person.class, pId );
assertEquals( pId.firstName, d2.patient.firstName );
assertEquals( pId.firstName, p2.firstName );
session.remove( d2 );
session.remove( p2 );
}
);
}
}
| DerivedIdentityIdClassParentSameIdTypeEmbeddedIdDepTest |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson/OAuth2AuthorizationRequestMixin.java | {
"start": 974,
"end": 1560
} | class ____ used to serialize/deserialize {@link OAuth2AuthorizationRequest}.
* It also registers a custom deserializer {@link OAuth2AuthorizationRequestDeserializer}.
*
* @author Joe Grandja
* @since 7.0
* @see OAuth2AuthorizationRequest
* @see OAuth2AuthorizationRequestDeserializer
*/
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
@JsonDeserialize(using = OAuth2AuthorizationRequestDeserializer.class)
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY, getterVisibility = JsonAutoDetect.Visibility.NONE,
isGetterVisibility = JsonAutoDetect.Visibility.NONE)
abstract | is |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/TestingResourceManager.java | {
"start": 2256,
"end": 4880
} | class ____ extends ResourceManager<ResourceID> {
private final Consumer<ResourceID> stopWorkerConsumer;
private final CompletableFuture<Void> readyToServeFuture;
public TestingResourceManager(
RpcService rpcService,
UUID leaderSessionId,
ResourceID resourceId,
HeartbeatServices heartbeatServices,
DelegationTokenManager delegationTokenManager,
SlotManager slotManager,
ResourceManagerPartitionTrackerFactory clusterPartitionTrackerFactory,
BlocklistHandler.Factory blocklistHandlerFactory,
JobLeaderIdService jobLeaderIdService,
FatalErrorHandler fatalErrorHandler,
ResourceManagerMetricGroup resourceManagerMetricGroup,
Consumer<ResourceID> stopWorkerConsumer,
CompletableFuture<Void> readyToServeFuture) {
super(
rpcService,
leaderSessionId,
resourceId,
heartbeatServices,
delegationTokenManager,
slotManager,
clusterPartitionTrackerFactory,
blocklistHandlerFactory,
jobLeaderIdService,
new ClusterInformation("localhost", 1234),
fatalErrorHandler,
resourceManagerMetricGroup,
RpcUtils.INF_TIMEOUT,
ForkJoinPool.commonPool());
this.stopWorkerConsumer = stopWorkerConsumer;
this.readyToServeFuture = readyToServeFuture;
}
@Override
protected void initialize() throws ResourceManagerException {
// noop
}
@Override
protected void terminate() {
// noop
}
@Override
protected void internalDeregisterApplication(
ApplicationStatus finalStatus, @Nullable String diagnostics)
throws ResourceManagerException {
// noop
}
@Override
protected Optional<ResourceID> getWorkerNodeIfAcceptRegistration(ResourceID resourceID) {
return Optional.of(resourceID);
}
@Override
public void stopWorkerIfSupported(ResourceID worker) {
stopWorkerConsumer.accept(worker);
}
@Override
public CompletableFuture<Void> getReadyToServeFuture() {
return readyToServeFuture;
}
@Override
protected ResourceAllocator getResourceAllocator() {
return NonSupportedResourceAllocatorImpl.INSTANCE;
}
public <T> CompletableFuture<T> runInMainThread(Callable<T> callable, Duration timeout) {
return callAsync(callable, timeout);
}
}
| TestingResourceManager |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesRequestTests.java | {
"start": 626,
"end": 5769
} | class ____ extends ESTestCase {
public void testValidation() {
final SuggestProfilesRequest request1 = new SuggestProfilesRequest(randomDataKeys(), randomName(), randomSize(), randomHint());
assertThat(request1.validate(), nullValue());
}
public void testValidationWillNotAllowNegativeSize() {
final SuggestProfilesRequest request1 = new SuggestProfilesRequest(
randomDataKeys(),
randomName(),
randomIntBetween(Integer.MIN_VALUE, -1),
randomHint()
);
assertThat(request1.validate().getMessage(), containsString("[size] parameter cannot be negative"));
}
public void testValidationWillNotAllowEmptyHints() {
final SuggestProfilesRequest request1 = new SuggestProfilesRequest(
randomDataKeys(),
randomName(),
randomSize(),
new SuggestProfilesRequest.Hint(null, null)
);
assertThat(request1.validate().getMessage(), containsString("[hint] parameter cannot be empty"));
final SuggestProfilesRequest request2 = new SuggestProfilesRequest(
randomDataKeys(),
randomName(),
randomSize(),
new SuggestProfilesRequest.Hint(List.of(), null)
);
assertThat(request2.validate().getMessage(), containsString("[uids] hint cannot be empty"));
}
public void testValidationLabels() {
final SuggestProfilesRequest request1 = new SuggestProfilesRequest(
randomDataKeys(),
randomName(),
randomSize(),
new SuggestProfilesRequest.Hint(
null,
randomFrom(Map.of(), randomMap(2, 5, () -> new Tuple<>(randomAlphaOfLength(20), randomAlphaOfLengthBetween(3, 8))))
)
);
assertThat(request1.validate().getMessage(), containsString("[labels] hint supports a single key"));
final SuggestProfilesRequest request2 = new SuggestProfilesRequest(
randomDataKeys(),
randomName(),
randomSize(),
new SuggestProfilesRequest.Hint(
null,
Map.of(randomFrom("*", "a*", "*b", "a*b"), randomList(1, 5, () -> randomAlphaOfLengthBetween(3, 8)))
)
);
assertThat(request2.validate().getMessage(), containsString("[labels] hint key cannot contain wildcard"));
final SuggestProfilesRequest request3 = new SuggestProfilesRequest(
randomDataKeys(),
randomName(),
randomSize(),
new SuggestProfilesRequest.Hint(null, Map.of(randomAlphaOfLength(5), List.of()))
);
assertThat(request3.validate().getMessage(), containsString("[labels] hint value cannot be empty"));
}
public void testErrorOnHintInstantiation() {
final ElasticsearchParseException e1 = expectThrows(
ElasticsearchParseException.class,
() -> new SuggestProfilesRequest.Hint(
null,
Map.of(randomAlphaOfLength(5), randomFrom(0, 42.0, randomBoolean(), Map.of(randomAlphaOfLength(5), randomAlphaOfLength(5))))
)
);
assertThat(e1.getMessage(), containsString("[labels] hint supports either string or list of strings as its value"));
final ElasticsearchParseException e2 = expectThrows(
ElasticsearchParseException.class,
() -> new SuggestProfilesRequest.Hint(null, Map.of(randomAlphaOfLength(5), List.of(0, randomAlphaOfLength(8))))
);
assertThat(e2.getMessage(), containsString("[labels] hint supports either string value or list of strings"));
}
private int randomSize() {
return randomIntBetween(0, Integer.MAX_VALUE);
}
private Set<String> randomDataKeys() {
return Set.copyOf(randomList(0, 5, () -> randomAlphaOfLengthBetween(3, 8)));
}
private String randomName() {
return randomAlphaOfLengthBetween(0, 8);
}
public static SuggestProfilesRequest.Hint randomHint() {
switch (randomIntBetween(0, 3)) {
case 0 -> {
return new SuggestProfilesRequest.Hint(randomList(1, 5, () -> randomAlphaOfLength(20)), null);
}
case 1 -> {
return new SuggestProfilesRequest.Hint(
null,
Map.of(
randomAlphaOfLengthBetween(3, 8),
randomFrom(randomAlphaOfLengthBetween(3, 8), randomList(1, 5, () -> randomAlphaOfLengthBetween(3, 8)))
)
);
}
case 2 -> {
return new SuggestProfilesRequest.Hint(
randomList(1, 5, () -> randomAlphaOfLength(20)),
Map.of(
randomAlphaOfLengthBetween(3, 8),
randomFrom(randomAlphaOfLengthBetween(3, 8), randomList(1, 5, () -> randomAlphaOfLengthBetween(3, 8)))
)
);
}
default -> {
return null;
}
}
}
}
| SuggestProfilesRequestTests |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-activemq/src/dockerTest/java/smoketest/activemq/SampleActiveMqTests.java | {
"start": 1637,
"end": 2051
} | class ____ {
@Container
@ServiceConnection
private static final ActiveMQContainer container = TestImage.container(ActiveMQContainer.class);
@Autowired
private Producer producer;
@Test
void sendSimpleMessage(CapturedOutput output) {
this.producer.send("Test message");
Awaitility.waitAtMost(Duration.ofMinutes(1)).untilAsserted(() -> assertThat(output).contains("Test message"));
}
}
| SampleActiveMqTests |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_doesNotHaveDuplicates_Test.java | {
"start": 1126,
"end": 1944
} | class ____ extends FloatArrayAssertBaseTest {
@Override
protected FloatArrayAssert invoke_api_method() {
return assertions.doesNotHaveDuplicates();
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertDoesNotHaveDuplicates(getInfo(assertions), getActual(assertions));
}
@Test
void should_pass_with_precision_specified_as_last_argument() {
// GIVEN
float[] actual = arrayOf(1.0f, 1.2f);
// THEN
assertThat(actual).doesNotHaveDuplicates(withPrecision(0.1f));
}
@Test
void should_pass_with_precision_specified_in_comparator() {
// GIVEN
float[] actual = arrayOf(1.0f, 1.05f);
// THEN
assertThat(actual).usingComparatorWithPrecision(0.01f)
.doesNotHaveDuplicates();
}
}
| FloatArrayAssert_doesNotHaveDuplicates_Test |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java | {
"start": 6883,
"end": 7801
} | interface ____ should be created
* @param ugi the user who is making the calls on the proxy object
* @param withRetries certain interfaces have a non-standard retry policy
* @return an object containing both the proxy and the associated
* delegation token service it corresponds to
* @throws IOException
*/
public static <T> ProxyAndInfo<T> createNonHAProxy(
Configuration conf, InetSocketAddress nnAddr, Class<T> xface,
UserGroupInformation ugi, boolean withRetries) throws IOException {
return createNonHAProxy(conf, nnAddr, xface, ugi, withRetries, null, null);
}
/**
* Creates an explicitly non-HA-enabled proxy object. Most of the time you
* don't want to use this, and should instead use {@link NameNodeProxies#createProxy}.
*
* @param conf the configuration object
* @param nnAddr address of the remote NN to connect to
* @param xface the IPC | which |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeOnErrorXTest.java | {
"start": 1020,
"end": 5931
} | class ____ extends RxJavaTest {
@Test
public void onErrorReturnConst() {
Maybe.error(new TestException())
.onErrorReturnItem(1)
.test()
.assertResult(1);
}
@Test
public void onErrorReturn() {
Maybe.error(new TestException())
.onErrorReturn(Functions.justFunction(1))
.test()
.assertResult(1);
}
@Test
public void onErrorComplete() {
Maybe.error(new TestException())
.onErrorComplete()
.test()
.assertResult();
}
@Test
public void onErrorCompleteTrue() {
Maybe.error(new TestException())
.onErrorComplete(Functions.alwaysTrue())
.test()
.assertResult();
}
@Test
public void onErrorCompleteFalse() {
Maybe.error(new TestException())
.onErrorComplete(Functions.alwaysFalse())
.test()
.assertFailure(TestException.class);
}
@Test
public void onErrorReturnFunctionThrows() {
TestHelper.assertCompositeExceptions(Maybe.error(new TestException())
.onErrorReturn(new Function<Throwable, Object>() {
@Override
public Object apply(Throwable v) throws Exception {
throw new IOException();
}
})
.to(TestHelper.testConsumer()), TestException.class, IOException.class);
}
@Test
public void onErrorCompletePredicateThrows() {
TestHelper.assertCompositeExceptions(Maybe.error(new TestException())
.onErrorComplete(new Predicate<Throwable>() {
@Override
public boolean test(Throwable v) throws Exception {
throw new IOException();
}
})
.to(TestHelper.testConsumer()), TestException.class, IOException.class);
}
@Test
public void onErrorResumeNext() {
Maybe.error(new TestException())
.onErrorResumeNext(Functions.justFunction(Maybe.just(1)))
.test()
.assertResult(1);
}
@Test
public void onErrorResumeNextFunctionThrows() {
TestHelper.assertCompositeExceptions(Maybe.error(new TestException())
.onErrorResumeNext(new Function<Throwable, Maybe<Object>>() {
@Override
public Maybe<Object> apply(Throwable v) throws Exception {
throw new IOException();
}
})
.to(TestHelper.testConsumer()), TestException.class, IOException.class);
}
@Test
public void onErrorReturnSuccess() {
Maybe.just(1)
.onErrorReturnItem(2)
.test()
.assertResult(1);
}
@Test
public void onErrorReturnEmpty() {
Maybe.<Integer>empty()
.onErrorReturnItem(2)
.test()
.assertResult();
}
@Test
public void onErrorReturnDispose() {
TestHelper.checkDisposed(PublishProcessor.create().singleElement().onErrorReturnItem(1));
}
@Test
public void onErrorReturnDoubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybe(new Function<Maybe<Object>, MaybeSource<Object>>() {
@Override
public MaybeSource<Object> apply(Maybe<Object> v) throws Exception {
return v.onErrorReturnItem(1);
}
});
}
@Test
public void onErrorCompleteSuccess() {
Maybe.just(1)
.onErrorComplete()
.test()
.assertResult(1);
}
@Test
public void onErrorCompleteEmpty() {
Maybe.<Integer>empty()
.onErrorComplete()
.test()
.assertResult();
}
@Test
public void onErrorCompleteDispose() {
TestHelper.checkDisposed(PublishProcessor.create().singleElement().onErrorComplete());
}
@Test
public void onErrorCompleteDoubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybe(new Function<Maybe<Object>, MaybeSource<Object>>() {
@Override
public MaybeSource<Object> apply(Maybe<Object> v) throws Exception {
return v.onErrorComplete();
}
});
}
@Test
public void onErrorNextDispose() {
TestHelper.checkDisposed(PublishProcessor.create().singleElement().onErrorResumeWith(Maybe.just(1)));
}
@Test
public void onErrorNextDoubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybe(new Function<Maybe<Object>, MaybeSource<Object>>() {
@Override
public MaybeSource<Object> apply(Maybe<Object> v) throws Exception {
return v.onErrorResumeWith(Maybe.just(1));
}
});
}
@Test
public void onErrorNextIsAlsoError() {
Maybe.error(new TestException("Main"))
.onErrorResumeWith(Maybe.error(new TestException("Secondary")))
.to(TestHelper.testConsumer())
.assertFailureAndMessage(TestException.class, "Secondary");
}
}
| MaybeOnErrorXTest |
java | elastic__elasticsearch | x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorServiceTests.java | {
"start": 2519,
"end": 31727
} | class ____ extends ESTestCase {
private static final DateFormatter FORMATTER = DateFormatter.forPattern("iso8601").withZone(ZoneOffset.UTC);
public void testIsGreenWhenRunningAndPoliciesConfigured() {
var clusterState = createClusterStateWith(new SnapshotLifecycleMetadata(createSlmPolicy(), RUNNING, null));
var service = createSlmHealthIndicatorService(clusterState);
assertThat(
service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO),
equalTo(
new HealthIndicatorResult(
NAME,
GREEN,
"Snapshot Lifecycle Management is running",
new SimpleHealthIndicatorDetails(Map.of("slm_status", RUNNING, "policies", 1)),
Collections.emptyList(),
Collections.emptyList()
)
)
);
}
public void testIsYellowWhenNotRunningAndPoliciesConfigured() {
var status = randomFrom(STOPPED, STOPPING);
var clusterState = createClusterStateWith(new SnapshotLifecycleMetadata(createSlmPolicy(), status, null));
var service = createSlmHealthIndicatorService(clusterState);
assertThat(
service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO),
equalTo(
new HealthIndicatorResult(
NAME,
YELLOW,
"Snapshot Lifecycle Management is not running",
new SimpleHealthIndicatorDetails(Map.of("slm_status", status, "policies", 1)),
Collections.singletonList(
new HealthIndicatorImpact(
NAME,
SlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID,
3,
"Scheduled snapshots are not running. New backup snapshots will not be created automatically.",
List.of(ImpactArea.BACKUP)
)
),
List.of(SLM_NOT_RUNNING)
)
)
);
}
public void testIsGreenWhenNotRunningAndNoPolicies() {
var status = randomFrom(STOPPED, STOPPING);
var clusterState = createClusterStateWith(new SnapshotLifecycleMetadata(Map.of(), status, null));
var service = createSlmHealthIndicatorService(clusterState);
assertThat(
service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO),
equalTo(
new HealthIndicatorResult(
NAME,
GREEN,
"No Snapshot Lifecycle Management policies configured",
new SimpleHealthIndicatorDetails(Map.of("slm_status", status, "policies", 0)),
Collections.emptyList(),
Collections.emptyList()
)
)
);
}
public void testIsGreenWhenNoMetadata() {
var clusterState = createClusterStateWith(null);
var service = createSlmHealthIndicatorService(clusterState);
assertThat(
service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO),
equalTo(
new HealthIndicatorResult(
NAME,
GREEN,
"No Snapshot Lifecycle Management policies configured",
new SimpleHealthIndicatorDetails(Map.of("slm_status", RUNNING, "policies", 0)),
Collections.emptyList(),
Collections.emptyList()
)
)
);
}
public void testIsGreenWhenPoliciesHaveFailedForLessThanWarningThreshold() {
long execTime = System.currentTimeMillis();
long window = TimeUnit.HOURS.toMillis(24) - 5000L; // Just under 24 hours.
var clusterState = createClusterStateWith(
new SnapshotLifecycleMetadata(
createSlmPolicy(
snapshotInvocation(randomBoolean() ? null : execTime, execTime + 1000L),
snapshotInvocation(null, execTime + window + 1000L),
randomLongBetween(0, 4),
null
),
RUNNING,
null
)
);
var service = createSlmHealthIndicatorService(clusterState);
assertThat(
service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO),
equalTo(
new HealthIndicatorResult(
NAME,
GREEN,
"Snapshot Lifecycle Management is running",
new SimpleHealthIndicatorDetails(Map.of("slm_status", RUNNING, "policies", 1)),
Collections.emptyList(),
Collections.emptyList()
)
)
);
}
public void testIsYellowWhenPoliciesHaveFailedForMoreThanWarningThreshold() {
long execTime = System.currentTimeMillis();
long window = TimeUnit.HOURS.toMillis(24) + 5000L; // 24 hours and some extra room.
long failedInvocations1 = randomLongBetween(5L, Long.MAX_VALUE);
long failedInvocations2 = randomLongBetween(5L, Long.MAX_VALUE);
long failedInvocations3 = randomLongBetween(5L, Long.MAX_VALUE);
var clusterState = createClusterStateWith(
new SnapshotLifecycleMetadata(
Map.of(
"test-policy",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy("test-policy", "<test-policy-{now/d}>", "", "test-repository", null, null, null)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(execTime, execTime + 1000L))
.setLastFailure(snapshotInvocation(null, execTime + window + 1000L))
.setInvocationsSinceLastSuccess(failedInvocations1)
.build(),
"test-policy-without-any-success",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy(
"test-policy-without-any-success",
"<test-policy-{now/d}>",
"",
"test-repository",
null,
null,
null
)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(null)
.setLastFailure(snapshotInvocation(null, execTime + window + 1000L))
.setInvocationsSinceLastSuccess(failedInvocations2)
.build(),
"test-policy-without-success-start-time",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy(
"test-policy-without-success-start-time",
"<test-policy-{now/d}>",
"",
"test-repository",
null,
null,
null
)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(null, execTime))
.setLastFailure(snapshotInvocation(null, execTime + window + 1000L))
.setInvocationsSinceLastSuccess(failedInvocations3)
.build()
),
RUNNING,
null
)
);
var service = createSlmHealthIndicatorService(clusterState);
HealthIndicatorResult calculate = service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO);
assertThat(
calculate,
equalTo(
new HealthIndicatorResult(
NAME,
YELLOW,
"Encountered [3] unhealthy snapshot lifecycle management policies",
new SimpleHealthIndicatorDetails(
Map.of(
"slm_status",
RUNNING,
"policies",
3,
"unhealthy_policies",
Map.of(
"count",
3,
"invocations_since_last_success",
Map.of(
"test-policy",
failedInvocations1,
"test-policy-without-any-success",
failedInvocations2,
"test-policy-without-success-start-time",
failedInvocations3
)
)
)
),
Collections.singletonList(
new HealthIndicatorImpact(
NAME,
SlmHealthIndicatorService.STALE_SNAPSHOTS_IMPACT_ID,
2,
"Some automated snapshots have not had a successful execution recently. Indices restored from affected "
+ "snapshots may not contain recent changes.",
List.of(ImpactArea.BACKUP)
)
),
List.of(
new Diagnosis(
SlmHealthIndicatorService.checkRecentlyFailedSnapshots(
"Several automated snapshot policies are unhealthy:\n"
+ "- [test-policy] had ["
+ failedInvocations1
+ "] repeated failures without successful execution since ["
+ FORMATTER.formatMillis(execTime)
+ "]\n"
+ "- [test-policy-without-any-success] had ["
+ failedInvocations2
+ "] repeated failures without successful execution\n"
+ "- [test-policy-without-success-start-time] had ["
+ failedInvocations3
+ "] repeated failures without successful execution",
"Check the snapshot lifecycle policies for detailed failure info:\n"
+ "- GET /_slm/policy/test-policy?human\n"
+ "- GET /_slm/policy/test-policy-without-any-success?human\n"
+ "- GET /_slm/policy/test-policy-without-success-start-time?human"
),
List.of(
new Diagnosis.Resource(
Type.SLM_POLICY,
List.of("test-policy", "test-policy-without-any-success", "test-policy-without-success-start-time")
)
)
)
)
)
)
);
}
public void testIsYellowWhenPoliciesExceedsUnhealthyIfNoSnapshotWithin() {
long tenMinutesAgo = Instant.now().minus(10, ChronoUnit.MINUTES).toEpochMilli();
long fiveMinutesAgo = Instant.now().minus(5, ChronoUnit.MINUTES).toEpochMilli();
TimeValue threshold = TimeValue.ONE_MINUTE;
var clusterState = createClusterStateWith(
new SnapshotLifecycleMetadata(
Map.of(
"test-policy-no-time-configured",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy("test-policy-no-time-configured", "test", "", "test-repository", null, null, null)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(tenMinutesAgo, fiveMinutesAgo))
.build(),
"test-policy-does-not-exceed-time",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy(
"test-policy-does-not-exceeds-time",
"test",
"",
"test-repository",
null,
null,
new TimeValue(1, TimeUnit.HOURS)
)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(tenMinutesAgo, fiveMinutesAgo))
.build(),
"test-policy-exceeds-time",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy("test-policy-exceeds-time", "test", "", "test-repository", null, null, threshold)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(tenMinutesAgo, fiveMinutesAgo))
.build(),
"test-policy-exceeds-time-without-success-start-time",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy(
"test-policy-exceeds-time-without-success-start-time",
"test",
"",
"test-repository",
null,
null,
threshold
)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(null, fiveMinutesAgo))
.build()
// TODO: first snapshot
),
RUNNING,
null
)
);
SlmHealthIndicatorService service = createSlmHealthIndicatorService(clusterState);
HealthIndicatorResult calculate = service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO);
assertThat(
calculate,
equalTo(
new HealthIndicatorResult(
NAME,
YELLOW,
"Encountered [2] unhealthy snapshot lifecycle management policies",
new SimpleHealthIndicatorDetails(
Map.of(
"slm_status",
RUNNING,
"policies",
4,
"unhealthy_policies",
Map.of(
"count",
2,
"invocations_since_last_success",
Map.of("test-policy-exceeds-time", 0L, "test-policy-exceeds-time-without-success-start-time", 0L)
)
)
),
Collections.singletonList(
new HealthIndicatorImpact(
NAME,
SlmHealthIndicatorService.MISSING_SNAPSHOT_IMPACT_ID,
2,
"Some snapshot lifecycle policies have not had a snapshot for some time",
List.of(ImpactArea.BACKUP)
)
),
List.of(
new Diagnosis(
SlmHealthIndicatorService.contactSupport(
"Several automated snapshot policies are unhealthy:\n"
+ "- [test-policy-exceeds-time] has not had a snapshot for "
+ threshold.toHumanReadableString(2)
+ ", since ["
+ FORMATTER.formatMillis(tenMinutesAgo)
+ "]\n"
+ "- [test-policy-exceeds-time-without-success-start-time] has not had a snapshot for "
+ threshold.toHumanReadableString(2)
+ ", since ["
+ FORMATTER.formatMillis(fiveMinutesAgo)
+ "]",
"Check the snapshot lifecycle policies for detailed failure info:\n"
+ "- GET /_slm/policy/test-policy-exceeds-time?human\n"
+ "- GET /_slm/policy/test-policy-exceeds-time-without-success-start-time?human"
),
List.of(
new Diagnosis.Resource(
Type.SLM_POLICY,
List.of("test-policy-exceeds-time", "test-policy-exceeds-time-without-success-start-time")
)
)
)
)
)
)
);
}
public void testSnapshotPolicyExceedsWarningThresholdPredicate() {
SnapshotLifecyclePolicyMetadata slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, null))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.build();
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(15L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(5L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(1L, slmPolicyMetadata), is(false));
slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, null))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(1000L, 2000L))
.setInvocationsSinceLastSuccess(0L)
.build();
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(15L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(5L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(1L, slmPolicyMetadata), is(false));
slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, null))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(1000L, 2000L))
.setLastFailure(snapshotInvocation(null, 9000L))
.setInvocationsSinceLastSuccess(randomLongBetween(5L, 10L))
.build();
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(15L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(5L, slmPolicyMetadata), is(true));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(1L, slmPolicyMetadata), is(true));
slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, null))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(snapshotInvocation(8000L, 9000L))
.setLastFailure(snapshotInvocation(null, 2000L))
.setInvocationsSinceLastSuccess(0L)
.build();
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(15L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(5L, slmPolicyMetadata), is(false));
assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(1L, slmPolicyMetadata), is(false));
}
public void testSnapshotPolicyMissingSnapshotTimeExceededPredicate() {
long tenMinutesAgo = Instant.now().minus(10, ChronoUnit.MINUTES).toEpochMilli();
long fiveMinutesAgo = Instant.now().minus(5, ChronoUnit.MINUTES).toEpochMilli();
// null unhealthyIfNoSnapshotWithin
{
SnapshotLifecyclePolicyMetadata slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, null))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(new SnapshotInvocationRecord("test-snapshot", tenMinutesAgo, fiveMinutesAgo, null))
.build();
assertThat(SlmHealthIndicatorService.missingSnapshotTimeExceeded(slmPolicyMetadata), is(false));
}
// does not exceed unhealthyIfNoSnapshotWithin
{
SnapshotLifecyclePolicyMetadata slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, TimeValue.MAX_VALUE))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(new SnapshotInvocationRecord("test-snapshot", tenMinutesAgo, fiveMinutesAgo, null))
.build();
assertThat(SlmHealthIndicatorService.missingSnapshotTimeExceeded(slmPolicyMetadata), is(false));
}
// exceed unhealthyIfNoSnapshotWithin
{
SnapshotLifecyclePolicyMetadata slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, TimeValue.ONE_MINUTE))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(new SnapshotInvocationRecord("test-snapshot", tenMinutesAgo, fiveMinutesAgo, null))
.build();
assertThat(SlmHealthIndicatorService.missingSnapshotTimeExceeded(slmPolicyMetadata), is(true));
}
// first snapshot, does not exceed unhealthyIfNoSnapshotWithin
{
SnapshotLifecyclePolicyMetadata slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, TimeValue.MAX_VALUE))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
// TODO: set first trigger time
.build();
assertThat(SlmHealthIndicatorService.missingSnapshotTimeExceeded(slmPolicyMetadata), is(false));
}
// first snapshot, exceed unhealthyIfNoSnapshotWithin
{
SnapshotLifecyclePolicyMetadata slmPolicyMetadata = SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(new SnapshotLifecyclePolicy("id", "test-policy", "", "test-repository", null, null, TimeValue.ONE_MINUTE))
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
// TODO: set first trigger time
.build();
assertThat(SlmHealthIndicatorService.missingSnapshotTimeExceeded(slmPolicyMetadata), is(false));
}
}
public void testSkippingFieldsWhenVerboseIsFalse() {
var status = randomFrom(STOPPED, STOPPING);
var clusterState = createClusterStateWith(new SnapshotLifecycleMetadata(createSlmPolicy(), status, null));
var service = createSlmHealthIndicatorService(clusterState);
assertThat(
service.calculate(false, HealthInfo.EMPTY_HEALTH_INFO),
equalTo(
new HealthIndicatorResult(
NAME,
YELLOW,
"Snapshot Lifecycle Management is not running",
HealthIndicatorDetails.EMPTY,
Collections.singletonList(
new HealthIndicatorImpact(
NAME,
SlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID,
3,
"Scheduled snapshots are not running. New backup snapshots will not be created automatically.",
List.of(ImpactArea.BACKUP)
)
),
List.of()
)
)
);
}
// We expose the indicator name and the diagnoses in the x-pack usage API. In order to index them properly in a telemetry index
// they need to be declared in the health-api-indexer.edn in the telemetry repository.
public void testMappedFieldsForTelemetry() {
assertThat(SlmHealthIndicatorService.NAME, equalTo("slm"));
assertThat(
checkRecentlyFailedSnapshots("cause", "action").getUniqueId(),
equalTo("elasticsearch:health:slm:diagnosis:check_recent_snapshot_failures")
);
assertThat(SLM_NOT_RUNNING.definition().getUniqueId(), equalTo("elasticsearch:health:slm:diagnosis:slm_disabled"));
}
private static ClusterState createClusterStateWith(SnapshotLifecycleMetadata metadata) {
var builder = new ClusterState.Builder(new ClusterName("test-cluster"));
if (metadata != null) {
builder.metadata(new Metadata.Builder().putCustom(SnapshotLifecycleMetadata.TYPE, metadata));
}
return builder.build();
}
private static Map<String, SnapshotLifecyclePolicyMetadata> createSlmPolicy() {
return createSlmPolicy(null, null, 0L, null);
}
private static Map<String, SnapshotLifecyclePolicyMetadata> createSlmPolicy(
SnapshotInvocationRecord lastSuccess,
SnapshotInvocationRecord lastFailure,
long invocationsSinceLastSuccess,
TimeValue unhealthyIfNoSnapshotWithin
) {
return Map.of(
"test-policy",
SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(
new SnapshotLifecyclePolicy("policy-id", "test-policy", "", "test-repository", null, null, unhealthyIfNoSnapshotWithin)
)
.setVersion(1L)
.setModifiedDate(System.currentTimeMillis())
.setLastSuccess(lastSuccess)
.setLastFailure(lastFailure)
.setInvocationsSinceLastSuccess(invocationsSinceLastSuccess)
.build()
);
}
private static SnapshotInvocationRecord snapshotInvocation(@Nullable Long startTime, long stopTime) {
return new SnapshotInvocationRecord("test-policy-snapshot", startTime, stopTime, null);
}
private static SlmHealthIndicatorService createSlmHealthIndicatorService(ClusterState clusterState) {
var clusterService = mock(ClusterService.class);
when(clusterService.state()).thenReturn(clusterState);
ClusterSettings clusterSettings = new ClusterSettings(
Settings.EMPTY,
Set.of(LifecycleSettings.SLM_HEALTH_FAILED_SNAPSHOT_WARN_THRESHOLD_SETTING)
);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
return new SlmHealthIndicatorService(clusterService);
}
}
| SlmHealthIndicatorServiceTests |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java | {
"start": 3604,
"end": 3894
} | class ____ extends EmptyHitExtractor {
static final TimestampExtractor INSTANCE = new TimestampExtractor();
@Override
public Timestamp extract(SearchHit hit) {
return Timestamp.of(String.valueOf(hit.docId()));
}
}
static | TimestampExtractor |
java | dropwizard__dropwizard | dropwizard-example/src/test/java/com/example/helloworld/IntegrationTest.java | {
"start": 1291,
"end": 2676
} | class ____ {
private static final String CONFIG = "test-example.yml";
@TempDir
static Path tempDir;
static Supplier<String> CURRENT_LOG = () -> tempDir.resolve("application.log").toString();
static Supplier<String> ARCHIVED_LOG = () -> tempDir.resolve("application-%d-%i.log.gz").toString();
static final DropwizardAppExtension<HelloWorldConfiguration> APP = new DropwizardAppExtension<>(
HelloWorldApplication.class, CONFIG,
new ResourceConfigurationSourceProvider(),
config("database.url", () -> "jdbc:h2:" + tempDir.resolve("database.h2")),
config("logging.appenders[1].currentLogFilename", CURRENT_LOG),
config("logging.appenders[1].archivedLogFilenamePattern", ARCHIVED_LOG)
);
@BeforeAll
public static void migrateDb() throws Exception {
APP.getApplication().run("db", "migrate", resourceFilePath(CONFIG));
}
@Test
void testHelloWorld() {
final Optional<String> name = Optional.of("Dr. IntegrationTest");
final Saying saying = APP.client().target("http://localhost:" + APP.getLocalPort() + "/hello-world")
.queryParam("name", name.get())
.request()
.get(Saying.class);
assertThat(saying.getContent()).isEqualTo(APP.getConfiguration().buildTemplate().render(name));
}
@Nested
| IntegrationTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollectorTests.java | {
"start": 980,
"end": 10495
} | class ____ extends ESTestCase {
static BigArrays mockBigArrays() {
return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
}
private HashBasedTransactionStore transactionStore = null;
@After
public void closeReleasables() throws IOException {
Releasables.close(transactionStore);
}
public void testQueue() {
transactionStore = new HashBasedTransactionStore(mockBigArrays());
try (TopItemIds topItemIds = transactionStore.getTopItemIds();) {
FrequentItemSetCollector collector = new FrequentItemSetCollector(transactionStore, topItemIds, 5, Long.MAX_VALUE);
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 1L, 2L, 3L, 4L }, 10L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 5L, 6L, 7L, 8L }, 11L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 11L, 12L, 13L, 14L }, 9L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 21L, 2L, 3L, 4L }, 13L));
// queue should be full, drop weakest element
assertEquals(9L, addToCollector(collector, new long[] { 31L, 2L, 3L, 4L }, 14L));
assertEquals(10L, addToCollector(collector, new long[] { 41L, 2L, 3L, 4L }, 15L));
assertEquals(11L, addToCollector(collector, new long[] { 51L, 2L, 3L, 4L }, 16L));
// check that internal data has been removed as well
assertEquals(5, collector.getFrequentItemsByCount().size());
// fill slots with same doc count
assertEquals(13L, addToCollector(collector, new long[] { 61L, 2L, 3L, 4L }, 20L));
assertEquals(14L, addToCollector(collector, new long[] { 71L, 2L, 3L, 4L }, 20L));
assertEquals(15L, addToCollector(collector, new long[] { 81L, 2L, 3L, 4L }, 20L));
assertEquals(16L, addToCollector(collector, new long[] { 91L, 2L, 3L, 4L }, 20L));
assertEquals(20L, addToCollector(collector, new long[] { 101L, 2L, 3L, 4L }, 20L));
// check that internal map has only 1 key
assertEquals(1, collector.getFrequentItemsByCount().size());
// ignore set below current weakest one
assertEquals(20L, addToCollector(collector, new long[] { 111L, 2L, 3L, 4L }, 1L));
FrequentItemSetPriorityQueue queue = collector.getQueue();
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 101L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 91L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 81L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 71L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 61L, 2L, 3L, 4L })));
assertEquals(0, collector.size());
}
}
public void testClosedSetSkipping() {
transactionStore = new HashBasedTransactionStore(mockBigArrays());
try (TopItemIds topItemIds = transactionStore.getTopItemIds();) {
FrequentItemSetCollector collector = new FrequentItemSetCollector(transactionStore, topItemIds, 5, Long.MAX_VALUE);
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 1L, 2L, 3L, 4L }, 10L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 5L, 6L, 7L, 8L }, 11L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 11L, 12L, 13L, 14L }, 12L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 21L, 2L, 3L, 4L }, 13L));
// add a subset of the 1st entry, it should be ignored
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 1L, 2L, 3L }, 10L));
// fill slots with same doc count
assertEquals(10L, addToCollector(collector, new long[] { 61L, 2L, 3L, 4L }, 20L));
assertEquals(11L, addToCollector(collector, new long[] { 71L, 2L, 3L, 4L }, 20L));
assertEquals(12L, addToCollector(collector, new long[] { 81L, 2L, 3L, 4L }, 20L));
assertEquals(13L, addToCollector(collector, new long[] { 91L, 2L, 3L, 4L }, 20L));
// add a subset of an entry, it should be ignored
assertEquals(13L, addToCollector(collector, new long[] { 81L, 2L, 4L }, 20L));
FrequentItemSetPriorityQueue queue = collector.getQueue();
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 21L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 91L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 81L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 71L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 61L, 2L, 3L, 4L })));
assertEquals(0, collector.size());
}
}
public void testCopyOnAdd() {
transactionStore = new HashBasedTransactionStore(mockBigArrays());
try (TopItemIds topItemIds = transactionStore.getTopItemIds();) {
FrequentItemSetCollector collector = new FrequentItemSetCollector(transactionStore, topItemIds, 5, Long.MAX_VALUE);
long[] itemSet = new long[] { 1L, 2L, 3L, 4L, 5L };
assertEquals(Long.MAX_VALUE, addToCollector(collector, itemSet, 10L));
itemSet[0] = 42L;
itemSet[4] = 42L;
FrequentItemSetPriorityQueue queue = collector.getQueue();
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 1L, 2L, 3L, 4L, 5L })));
}
}
public void testLargerItemSetsPreference() {
transactionStore = new HashBasedTransactionStore(mockBigArrays());
try (TopItemIds topItemIds = transactionStore.getTopItemIds();) {
FrequentItemSetCollector collector = new FrequentItemSetCollector(transactionStore, topItemIds, 5, Long.MAX_VALUE);
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 1L, 2L, 3L, 4L }, 10L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 5L, 6L, 7L, 8L }, 11L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 11L, 12L, 13L, 14L }, 9L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 21L, 2L, 3L, 4L }, 13L));
// queue should be full, drop weakest element
assertEquals(9L, addToCollector(collector, new long[] { 31L, 2L, 3L, 4L }, 14L));
assertEquals(9L, collector.getLastSet().getDocCount());
assertEquals(4, collector.getLastSet().size());
// ignore set with same doc count but fewer items
assertEquals(9L, addToCollector(collector, new long[] { 22L, 23L, 24L }, 9L));
assertEquals(9L, collector.getLastSet().getDocCount());
assertEquals(4, collector.getLastSet().size());
// take set with same doc count but more items
assertEquals(9L, addToCollector(collector, new long[] { 25L, 26L, 27L, 28L, 29L }, 9L));
assertEquals(9L, collector.getLastSet().getDocCount());
assertEquals(5, collector.getLastSet().size());
FrequentItemSetPriorityQueue queue = collector.getQueue();
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 25L, 26L, 27L, 28L, 29L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 1L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 5L, 6L, 7L, 8L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 21L, 2L, 3L, 4L })));
assertThat(queue.pop().getItems(), equalTo(createItemSetBitSet(new long[] { 31L, 2L, 3L, 4L })));
assertEquals(0, collector.size());
}
}
public void testSuperSetAfterSubSet() {
transactionStore = new HashBasedTransactionStore(mockBigArrays());
try (TopItemIds topItemIds = transactionStore.getTopItemIds();) {
FrequentItemSetCollector collector = new FrequentItemSetCollector(transactionStore, topItemIds, 5, Long.MAX_VALUE);
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 1L, 2L, 3L, 4L, 6L }, 3L));
assertEquals(Long.MAX_VALUE, addToCollector(collector, new long[] { 1L, 2L, 3L, 4L, 6L, 8L }, 3L));
assertEquals(1, collector.size());
assertThat(collector.getQueue().pop().getItems(), equalTo(createItemSetBitSet(new long[] { 1L, 2L, 3L, 4L, 6L, 8L })));
}
}
private static ItemSetBitSet createItemSetBitSet(long[] longs) {
ItemSetBitSet itemsAsBitVector = new ItemSetBitSet();
for (int i = 0; i < longs.length; ++i) {
itemsAsBitVector.set((int) longs[i]);
}
return itemsAsBitVector;
}
private static long addToCollector(FrequentItemSetCollector collector, long[] longsRef, long docCount) {
return collector.add(createItemSetBitSet(longsRef), docCount);
}
}
| FrequentItemSetCollectorTests |
java | apache__kafka | connect/mirror/src/main/java/org/apache/kafka/connect/mirror/DefaultConfigPropertyFilter.java | {
"start": 3406,
"end": 4376
} | class ____ extends AbstractConfig {
static final ConfigDef DEF = new ConfigDef()
.define(CONFIG_PROPERTIES_EXCLUDE_CONFIG,
Type.LIST,
CONFIG_PROPERTIES_EXCLUDE_DEFAULT,
ConfigDef.ValidList.anyNonDuplicateValues(true, false),
Importance.HIGH,
CONFIG_PROPERTIES_EXCLUDE_DOC)
.define(USE_DEFAULTS_FROM,
Type.STRING,
USE_DEFAULTS_FROM_DEFAULT,
Importance.MEDIUM,
USE_DEFAULTS_FROM_DOC);
ConfigPropertyFilterConfig(Map<String, ?> props) {
super(DEF, props, false);
}
Pattern excludePattern() {
return MirrorUtils.compilePatternList(getList(CONFIG_PROPERTIES_EXCLUDE_CONFIG));
}
String useDefaultsFrom() {
return getString(USE_DEFAULTS_FROM);
}
}
}
| ConfigPropertyFilterConfig |
java | google__truth | core/src/main/java/com/google/common/truth/ActualValueInference.java | {
"start": 10627,
"end": 36500
} | class ____.
*/
private final ImmutableSetMultimap.Builder<Integer, StackEntry> actualValueAtLine;
InferenceMethodVisitor(
int access,
String owner,
String name,
String methodDescriptor,
ImmutableSetMultimap.Builder<Integer, StackEntry> actualValueAtLine) {
super(Opcodes.ASM9);
localVariableSlots = createInitialLocalVariableSlots(access, owner, name, methodDescriptor);
previousFrame =
FrameInfo.create(
ImmutableList.copyOf(localVariableSlots), ImmutableList.<StackEntry>of());
this.methodSignature = owner + "." + name + methodDescriptor;
this.actualValueAtLine = actualValueAtLine;
}
@Override
public void visitCode() {
checkState(!used, "Cannot reuse this method visitor.");
used = true;
super.visitCode();
}
@Override
public void visitEnd() {
if (seenJump) {
/*
* If there are multiple paths through a method, we'd have to examine them all and make sure
* that the values still match up. We could try someday, but it's hard.
*/
super.visitEnd();
return;
}
ImmutableSetMultimap<Label, Integer> lineNumbersAtLabel = this.lineNumbersAtLabel.build();
for (Entry<ImmutableList<Label>, StackEntry> e : actualValueAtLocation.build().entries()) {
for (int lineNumber : lineNumbers(e.getKey(), lineNumbersAtLabel)) {
actualValueAtLine.put(lineNumber, e.getValue());
}
}
super.visitEnd();
}
private static ImmutableSet<Integer> lineNumbers(
ImmutableList<Label> labels, ImmutableSetMultimap<Label, Integer> lineNumbersAtLabel) {
for (Label label : labels.reverse()) {
if (lineNumbersAtLabel.containsKey(label)) {
return lineNumbersAtLabel.get(label);
}
}
return ImmutableSet.of();
}
@Override
public void visitLineNumber(int line, Label start) {
lineNumbersAtLabel.put(start, line);
super.visitLineNumber(line, start);
}
@Override
public void visitLabel(Label label) {
labelsSeen.add(label);
super.visitLabel(label);
}
/** Returns the entry for the operand at the specified offset. 0 means the top of the stack. */
private StackEntry getOperandFromTop(int offsetFromTop) {
int index = operandStack.size() - 1 - offsetFromTop;
checkState(
index >= 0,
"Invalid offset %s in the list of size %s. The current method is %s",
offsetFromTop,
operandStack.size(),
methodSignature);
return operandStack.get(index);
}
@Override
public void visitInsn(int opcode) {
switch (opcode) {
case Opcodes.NOP:
case Opcodes.INEG:
case Opcodes.LNEG:
case Opcodes.FNEG:
case Opcodes.DNEG:
case Opcodes.I2B:
case Opcodes.I2C:
case Opcodes.I2S:
case Opcodes.RETURN:
break;
case Opcodes.ACONST_NULL:
push(InferredType.NULL);
break;
case Opcodes.ICONST_M1:
case Opcodes.ICONST_0:
case Opcodes.ICONST_1:
case Opcodes.ICONST_2:
case Opcodes.ICONST_3:
case Opcodes.ICONST_4:
case Opcodes.ICONST_5:
push(InferredType.INT);
break;
case Opcodes.LCONST_0:
case Opcodes.LCONST_1:
push(InferredType.LONG);
push(InferredType.TOP);
break;
case Opcodes.FCONST_0:
case Opcodes.FCONST_1:
case Opcodes.FCONST_2:
push(InferredType.FLOAT);
break;
case Opcodes.DCONST_0:
case Opcodes.DCONST_1:
push(InferredType.DOUBLE);
push(InferredType.TOP);
break;
case Opcodes.IALOAD:
case Opcodes.BALOAD:
case Opcodes.CALOAD:
case Opcodes.SALOAD:
pop(2);
push(InferredType.INT);
break;
case Opcodes.LALOAD:
case Opcodes.D2L:
pop(2);
push(InferredType.LONG);
push(InferredType.TOP);
break;
case Opcodes.DALOAD:
case Opcodes.L2D:
pop(2);
push(InferredType.DOUBLE);
push(InferredType.TOP);
break;
case Opcodes.AALOAD:
InferredType arrayType = pop(2).type();
InferredType elementType = arrayType.getElementTypeIfArrayOrThrow();
push(elementType);
break;
case Opcodes.IASTORE:
case Opcodes.BASTORE:
case Opcodes.CASTORE:
case Opcodes.SASTORE:
case Opcodes.FASTORE:
case Opcodes.AASTORE:
pop(3);
break;
case Opcodes.LASTORE:
case Opcodes.DASTORE:
pop(4);
break;
case Opcodes.POP:
case Opcodes.IRETURN:
case Opcodes.FRETURN:
case Opcodes.ARETURN:
case Opcodes.ATHROW:
case Opcodes.MONITORENTER:
case Opcodes.MONITOREXIT:
pop();
break;
case Opcodes.POP2:
case Opcodes.LRETURN:
case Opcodes.DRETURN:
pop(2);
break;
case Opcodes.DUP:
push(top());
break;
case Opcodes.DUP_X1:
{
StackEntry top = pop();
StackEntry next = pop();
push(top);
push(next);
push(top);
break;
}
case Opcodes.DUP_X2:
{
StackEntry top = pop();
StackEntry next = pop();
StackEntry bottom = pop();
push(top);
push(bottom);
push(next);
push(top);
break;
}
case Opcodes.DUP2:
{
StackEntry top = pop();
StackEntry next = pop();
push(next);
push(top);
push(next);
push(top);
break;
}
case Opcodes.DUP2_X1:
{
StackEntry top = pop();
StackEntry next = pop();
StackEntry bottom = pop();
push(next);
push(top);
push(bottom);
push(next);
push(top);
break;
}
case Opcodes.DUP2_X2:
{
StackEntry t1 = pop();
StackEntry t2 = pop();
StackEntry t3 = pop();
StackEntry t4 = pop();
push(t2);
push(t1);
push(t4);
push(t3);
push(t2);
push(t1);
break;
}
case Opcodes.SWAP:
{
StackEntry top = pop();
StackEntry next = pop();
push(top);
push(next);
break;
}
case Opcodes.IADD:
case Opcodes.ISUB:
case Opcodes.IMUL:
case Opcodes.IDIV:
case Opcodes.IREM:
case Opcodes.ISHL:
case Opcodes.ISHR:
case Opcodes.IUSHR:
case Opcodes.IAND:
case Opcodes.IOR:
case Opcodes.IXOR:
case Opcodes.L2I:
case Opcodes.D2I:
case Opcodes.FCMPL:
case Opcodes.FCMPG:
pop(2);
push(InferredType.INT);
break;
case Opcodes.LADD:
case Opcodes.LSUB:
case Opcodes.LMUL:
case Opcodes.LDIV:
case Opcodes.LREM:
case Opcodes.LAND:
case Opcodes.LOR:
case Opcodes.LXOR:
pop(4);
push(InferredType.LONG);
push(InferredType.TOP);
break;
case Opcodes.LSHL:
case Opcodes.LSHR:
case Opcodes.LUSHR:
pop(3);
push(InferredType.LONG);
push(InferredType.TOP);
break;
case Opcodes.I2L:
case Opcodes.F2L:
pop();
push(InferredType.LONG);
push(InferredType.TOP);
break;
case Opcodes.I2F:
pop();
push(InferredType.FLOAT);
break;
case Opcodes.LCMP:
case Opcodes.DCMPG:
case Opcodes.DCMPL:
pop(4);
push(InferredType.INT);
break;
case Opcodes.I2D:
case Opcodes.F2D:
pop();
push(InferredType.DOUBLE);
push(InferredType.TOP);
break;
case Opcodes.F2I:
case Opcodes.ARRAYLENGTH:
pop();
push(InferredType.INT);
break;
case Opcodes.FALOAD:
case Opcodes.FADD:
case Opcodes.FSUB:
case Opcodes.FMUL:
case Opcodes.FDIV:
case Opcodes.FREM:
case Opcodes.L2F:
case Opcodes.D2F:
pop(2);
push(InferredType.FLOAT);
break;
case Opcodes.DADD:
case Opcodes.DSUB:
case Opcodes.DMUL:
case Opcodes.DDIV:
case Opcodes.DREM:
pop(4);
push(InferredType.DOUBLE);
push(InferredType.TOP);
break;
default:
throw new RuntimeException("Unhandled opcode " + opcode);
}
super.visitInsn(opcode);
}
@Override
public void visitIntInsn(int opcode, int operand) {
switch (opcode) {
case Opcodes.BIPUSH:
case Opcodes.SIPUSH:
push(InferredType.INT);
break;
case Opcodes.NEWARRAY:
pop();
switch (operand) {
case Opcodes.T_BOOLEAN:
pushDescriptor("[Z");
break;
case Opcodes.T_CHAR:
pushDescriptor("[C");
break;
case Opcodes.T_FLOAT:
pushDescriptor("[F");
break;
case Opcodes.T_DOUBLE:
pushDescriptor("[D");
break;
case Opcodes.T_BYTE:
pushDescriptor("[B");
break;
case Opcodes.T_SHORT:
pushDescriptor("[S");
break;
case Opcodes.T_INT:
pushDescriptor("[I");
break;
case Opcodes.T_LONG:
pushDescriptor("[J");
break;
default:
throw new RuntimeException("Unhandled operand value: " + operand);
}
break;
default:
throw new RuntimeException("Unhandled opcode " + opcode);
}
super.visitIntInsn(opcode, operand);
}
@Override
public void visitVarInsn(int opcode, int var) {
switch (opcode) {
case Opcodes.ILOAD:
push(InferredType.INT);
break;
case Opcodes.LLOAD:
push(InferredType.LONG);
push(InferredType.TOP);
break;
case Opcodes.FLOAD:
push(InferredType.FLOAT);
break;
case Opcodes.DLOAD:
push(InferredType.DOUBLE);
push(InferredType.TOP);
break;
case Opcodes.ALOAD:
push(getLocalVariable(var));
break;
case Opcodes.ISTORE:
case Opcodes.FSTORE:
case Opcodes.ASTORE:
{
StackEntry entry = pop();
setLocalVariable(var, entry);
break;
}
case Opcodes.LSTORE:
case Opcodes.DSTORE:
{
StackEntry entry = pop(2);
setLocalVariable(var, entry);
setLocalVariable(var + 1, opaque(InferredType.TOP));
break;
}
case Opcodes.RET:
throw new RuntimeException("The instruction RET is not supported");
default:
throw new RuntimeException("Unhandled opcode " + opcode);
}
super.visitVarInsn(opcode, var);
}
@Override
public void visitTypeInsn(int opcode, String type) {
String descriptor = convertToDescriptor(type);
switch (opcode) {
case Opcodes.NEW:
// This should be UNINITIALIZED(label). Okay for type inference.
pushDescriptor(descriptor);
break;
case Opcodes.ANEWARRAY:
pop();
pushDescriptor('[' + descriptor);
break;
case Opcodes.CHECKCAST:
push(pop().withType(InferredType.create(convertToDescriptor(type))));
break;
case Opcodes.INSTANCEOF:
pop();
push(InferredType.INT);
break;
default:
throw new RuntimeException("Unhandled opcode " + opcode);
}
super.visitTypeInsn(opcode, type);
}
@Override
public void visitFieldInsn(int opcode, String owner, String name, String desc) {
switch (opcode) {
case Opcodes.GETSTATIC:
pushDescriptor(desc);
break;
case Opcodes.PUTSTATIC:
popDescriptor(desc);
break;
case Opcodes.GETFIELD:
pop();
pushDescriptor(desc);
break;
case Opcodes.PUTFIELD:
popDescriptor(desc);
pop();
break;
default:
throw new RuntimeException(
"Unhandled opcode "
+ opcode
+ ", owner="
+ owner
+ ", name="
+ name
+ ", desc"
+ desc);
}
super.visitFieldInsn(opcode, owner, name, desc);
}
@Override
public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
if (opcode == Opcodes.INVOKESPECIAL && name.equals("<init>")) {
int argumentSize = (Type.getArgumentsAndReturnSizes(desc) >> 2);
InferredType receiverType = getOperandFromTop(argumentSize - 1).type();
if (receiverType.isUninitialized()) {
InferredType realType = InferredType.create('L' + owner + ';');
replaceUninitializedTypeInStack(receiverType, realType);
}
}
switch (opcode) {
case Opcodes.INVOKESPECIAL:
case Opcodes.INVOKEVIRTUAL:
case Opcodes.INVOKESTATIC:
case Opcodes.INVOKEINTERFACE:
Invocation.Builder invocation = Invocation.builder(name);
if (isThatOrAssertThat(owner, name)) {
invocation.setActualValue(getOperandFromTop(0));
} else if (isBoxing(owner, name, desc)) {
invocation.setBoxingInput(
// double and long are represented by a TOP with the "real" value under it.
getOperandFromTop(0).type() == InferredType.TOP
? getOperandFromTop(1)
: getOperandFromTop(0));
}
popDescriptor(desc);
if (opcode != Opcodes.INVOKESTATIC) {
invocation.setReceiver(pop());
}
pushDescriptorAndMaybeProcessMethodCall(desc, invocation.build());
break;
default:
throw new RuntimeException(
String.format(
"Unhandled opcode %s, owner=%s, name=%s, desc=%s, itf=%s",
opcode, owner, name, desc, itf));
}
super.visitMethodInsn(opcode, owner, name, desc, itf);
}
@Override
public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
popDescriptor(desc);
pushDescriptor(desc);
super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
}
@Override
public void visitJumpInsn(int opcode, Label label) {
seenJump = true;
switch (opcode) {
case Opcodes.IFEQ:
case Opcodes.IFNE:
case Opcodes.IFLT:
case Opcodes.IFGE:
case Opcodes.IFGT:
case Opcodes.IFLE:
pop();
break;
case Opcodes.IF_ICMPEQ:
case Opcodes.IF_ICMPNE:
case Opcodes.IF_ICMPLT:
case Opcodes.IF_ICMPGE:
case Opcodes.IF_ICMPGT:
case Opcodes.IF_ICMPLE:
case Opcodes.IF_ACMPEQ:
case Opcodes.IF_ACMPNE:
pop(2);
break;
case Opcodes.GOTO:
break;
case Opcodes.JSR:
throw new RuntimeException("The JSR instruction is not supported.");
case Opcodes.IFNULL:
case Opcodes.IFNONNULL:
pop(1);
break;
default:
throw new RuntimeException("Unhandled opcode " + opcode);
}
super.visitJumpInsn(opcode, label);
}
@Override
public void visitLdcInsn(Object cst) {
if (cst instanceof Integer) {
push(InferredType.INT);
} else if (cst instanceof Float) {
push(InferredType.FLOAT);
} else if (cst instanceof Long) {
push(InferredType.LONG);
push(InferredType.TOP);
} else if (cst instanceof Double) {
push(InferredType.DOUBLE);
push(InferredType.TOP);
} else if (cst instanceof String) {
pushDescriptor("Ljava/lang/String;");
} else if (cst instanceof Type) {
pushDescriptor(((Type) cst).getDescriptor());
} else if (cst instanceof Handle) {
pushDescriptor("Ljava/lang/invoke/MethodHandle;");
} else {
throw new RuntimeException("Cannot handle constant " + cst + " for LDC instruction");
}
super.visitLdcInsn(cst);
}
@Override
public void visitIincInsn(int var, int increment) {
setLocalVariable(var, opaque(InferredType.INT));
super.visitIincInsn(var, increment);
}
@Override
public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
seenJump = true;
pop();
super.visitTableSwitchInsn(min, max, dflt, labels);
}
@Override
public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
seenJump = true;
pop();
super.visitLookupSwitchInsn(dflt, keys, labels);
}
@Override
public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
/*
* Inference already fails for at least some try-catch blocks, apparently because of the extra
* frames they create. Still, let's disable inference explicitly.
*/
seenJump = true;
super.visitTryCatchBlock(start, end, handler, type);
}
@Override
public void visitMultiANewArrayInsn(String desc, int dims) {
pop(dims);
pushDescriptor(desc);
super.visitMultiANewArrayInsn(desc, dims);
}
@Override
public void visitFrame(int type, int nLocal, Object[] local, int nStack, Object[] stack) {
switch (type) {
case Opcodes.F_NEW:
// Expanded form.
previousFrame =
FrameInfo.create(
convertTypesInStackMapFrame(nLocal, local),
convertTypesInStackMapFrame(nStack, stack));
break;
case Opcodes.F_SAME:
// This frame type indicates that the frame has exactly the same local variables as the
// previous frame and that the operand stack is empty.
previousFrame = FrameInfo.create(previousFrame.locals(), ImmutableList.<StackEntry>of());
break;
case Opcodes.F_SAME1:
// This frame type indicates that the frame has exactly the same local variables as the
// previous frame and that the operand stack has one entry.
previousFrame =
FrameInfo.create(previousFrame.locals(), convertTypesInStackMapFrame(nStack, stack));
break;
case Opcodes.F_APPEND:
// This frame type indicates that the frame has the same locals as the previous frame
// except that k additional locals are defined, and that the operand stack is empty.
previousFrame =
FrameInfo.create(
appendArrayToList(previousFrame.locals(), nLocal, local),
ImmutableList.<StackEntry>of());
break;
case Opcodes.F_CHOP:
// This frame type indicates that the frame has the same local variables as the previous
// frame except that the last k local variables are absent, and that the operand stack is
// empty.
previousFrame =
FrameInfo.create(
removeBackFromList(previousFrame.locals(), nLocal),
ImmutableList.<StackEntry>of());
break;
case Opcodes.F_FULL:
previousFrame =
FrameInfo.create(
convertTypesInStackMapFrame(nLocal, local),
convertTypesInStackMapFrame(nStack, stack));
break;
default:
// continue below
}
// Update types for operand stack and local variables.
operandStack.clear();
operandStack.addAll(previousFrame.stack());
localVariableSlots.clear();
localVariableSlots.addAll(previousFrame.locals());
super.visitFrame(type, nLocal, local, nStack, stack);
}
private static String convertToDescriptor(String type) {
return (type.length() > 1 && type.charAt(0) != '[') ? 'L' + type + ';' : type;
}
private void push(InferredType type) {
push(opaque(type));
}
private void push(StackEntry entry) {
operandStack.add(entry);
}
private void replaceUninitializedTypeInStack(InferredType oldType, InferredType newType) {
checkArgument(oldType.isUninitialized(), "The old type is NOT uninitialized. %s", oldType);
for (int i = 0, size = operandStack.size(); i < size; ++i) {
InferredType type = operandStack.get(i).type();
if (type.equals(oldType)) {
operandStack.set(i, opaque(newType));
}
}
}
private void pushDescriptor(String desc) {
pushDescriptorAndMaybeProcessMethodCall(desc, /* invocation= */ null);
}
/**
* Pushes entries onto the stack for the given arguments, and, if the descriptor is for a method
* call, records the assertion made by that call (if any).
*
* <p>If the descriptor is for a call, this method not only records the assertion made by it (if
* any) but also examines its parameters to generate more detailed stack entries.
*
* @param desc the descriptor of the type to be added to the stack (or the descriptor of the
* method whose return value is to be added to the stack)
* @param invocation the method invocation being visited, or {@code null} if a non-method
* descriptor is being visited
*/
private void pushDescriptorAndMaybeProcessMethodCall(
String desc, @Nullable Invocation invocation) {
if (invocation != null && invocation.isOnSubjectInstance()) {
actualValueAtLocation.put(
labelsSeen.build(), checkNotNull(invocation.receiver()).actualValue());
}
boolean hasParams = invocation != null && (Type.getArgumentsAndReturnSizes(desc) >> 2) > 1;
int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
switch (desc.charAt(index)) {
case 'V':
return;
case 'Z':
case 'C':
case 'B':
case 'S':
case 'I':
pushMaybeDescribed(InferredType.INT, invocation, hasParams);
break;
case 'F':
pushMaybeDescribed(InferredType.FLOAT, invocation, hasParams);
break;
case 'D':
pushMaybeDescribed(InferredType.DOUBLE, invocation, hasParams);
push(InferredType.TOP);
break;
case 'J':
pushMaybeDescribed(InferredType.LONG, invocation, hasParams);
push(InferredType.TOP);
break;
case 'L':
case '[':
pushMaybeDescribed(InferredType.create(desc.substring(index)), invocation, hasParams);
break;
default:
throw new RuntimeException("Unhandled type: " + desc);
}
}
private void pushMaybeDescribed(
InferredType type, @Nullable Invocation invocation, boolean hasParams) {
push(invocation == null ? opaque(type) : invocation.deriveEntry(type, hasParams));
}
@CanIgnoreReturnValue
private StackEntry pop() {
return pop(1);
}
/** Pop elements from the end of the operand stack, and return the last popped element. */
@CanIgnoreReturnValue
private StackEntry pop(int count) {
checkArgument(
count >= 1, "The count should be at least one: %s (In %s)", count, methodSignature);
checkState(
operandStack.size() >= count,
"There are no enough elements in the stack. count=%s, stack=%s (In %s)",
count,
operandStack,
methodSignature);
int expectedLastIndex = operandStack.size() - count - 1;
StackEntry lastPopped;
do {
lastPopped = operandStack.remove(operandStack.size() - 1);
} while (operandStack.size() - 1 > expectedLastIndex);
return lastPopped;
}
private void popDescriptor(String desc) {
char c = desc.charAt(0);
switch (c) {
case '(':
int argumentSize = (Type.getArgumentsAndReturnSizes(desc) >> 2) - 1;
if (argumentSize > 0) {
pop(argumentSize);
}
break;
case 'J':
case 'D':
pop(2);
break;
default:
pop(1);
break;
}
}
private StackEntry getLocalVariable(int index) {
checkState(
index < localVariableSlots.size(),
"Cannot find type for var %s in method %s",
index,
methodSignature);
return localVariableSlots.get(index);
}
private void setLocalVariable(int index, StackEntry entry) {
while (localVariableSlots.size() <= index) {
localVariableSlots.add(opaque(InferredType.TOP));
}
localVariableSlots.set(index, entry);
}
private StackEntry top() {
return Iterables.getLast(operandStack);
}
/**
* Create the slots for local variables at the very beginning of the method with the information
* of the declaring | visitor |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/socket/WebSocketIntegrationTests.java | {
"start": 8339,
"end": 8724
} | class ____ implements WebSocketHandler {
@Override
public Mono<Void> handle(WebSocketSession session) {
HttpHeaders headers = session.getHandshakeInfo().getHeaders();
String payload = "my-header:" + headers.getFirst("my-header");
WebSocketMessage message = session.textMessage(payload);
return session.send(Mono.just(message));
}
}
private static | CustomHeaderHandler |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java | {
"start": 1087,
"end": 1179
} | class ____ extends RpcMessage {
/** RPC reply_stat as defined in RFC 1831 */
public | RpcReply |
java | quarkusio__quarkus | extensions/smallrye-health/deployment/src/test/java/io/quarkus/smallrye/health/test/DisableHealthCheckTest.java | {
"start": 341,
"end": 1128
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(BasicHealthCheck.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"))
.overrideConfigKey("quarkus.smallrye-health.check.\""
+ BasicHealthCheck.class.getName() + "\".enabled", "false");
@Test
void testHealthCheckDisabled() {
try {
RestAssured.defaultParser = Parser.JSON;
RestAssured.when().get("/q/health").then()
.body("status", is("UP"),
"checks.size()", is(0));
} finally {
RestAssured.reset();
}
}
}
| DisableHealthCheckTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/io/InputStreamFSInputWrapperTest.java | {
"start": 1050,
"end": 1797
} | class ____ {
@Test
void testClose() throws Exception {
final AtomicBoolean closeCalled = new AtomicBoolean(false);
InputStream mockedInputStream =
new InputStream() {
@Override
public int read() {
return 0;
}
@Override
public void close() throws IOException {
closeCalled.set(true);
super.close();
}
};
InputStreamFSInputWrapper wrapper = new InputStreamFSInputWrapper(mockedInputStream);
wrapper.close();
assertThat(closeCalled).isTrue();
}
}
| InputStreamFSInputWrapperTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/TaggedInputSplit.java | {
"start": 2690,
"end": 2837
} | class ____ use
*/
public Class<? extends InputFormat> getInputFormatClass() {
return inputFormatClass;
}
/**
* Retrieves the Mapper | to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/JdbcTypeRegistrations.java | {
"start": 645,
"end": 714
} | interface ____ {
JdbcTypeRegistration[] value();
}
| JdbcTypeRegistrations |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/builder/BuilderWithCreatorTest.java | {
"start": 2828,
"end": 2968
} | class ____
{
final double value;
protected DoubleCreatorValue(double v) { value = v; }
}
static | DoubleCreatorValue |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/serializer/BigDecimalCodec.java | {
"start": 1092,
"end": 3816
} | class ____ implements ObjectSerializer, ObjectDeserializer {
final static BigDecimal LOW = BigDecimal.valueOf(-9007199254740991L);
final static BigDecimal HIGH = BigDecimal.valueOf(9007199254740991L);
public final static BigDecimalCodec instance = new BigDecimalCodec();
public void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType, int features) throws IOException {
SerializeWriter out = serializer.out;
if (object == null) {
out.writeNull(SerializerFeature.WriteNullNumberAsZero);
} else {
BigDecimal val = (BigDecimal) object;
int scale = val.scale();
String outText;
if (SerializerFeature.isEnabled(features, out.features, SerializerFeature.WriteBigDecimalAsPlain)
&& scale >= -100 && scale < 100) {
outText = val.toPlainString();
} else {
outText = val.toString();
}
if (scale == 0) {
if (outText.length() >= 16
&& SerializerFeature.isEnabled(features, out.features, SerializerFeature.BrowserCompatible)
&& (val.compareTo(LOW) < 0
|| val.compareTo(HIGH) > 0))
{
out.writeString(outText);
return;
}
}
out.write(outText);
if (out.isEnabled(SerializerFeature.WriteClassName) && fieldType != BigDecimal.class && val.scale() == 0) {
out.write('.');
}
}
}
@SuppressWarnings("unchecked")
public <T> T deserialze(DefaultJSONParser parser, Type clazz, Object fieldName) {
try {
return (T) deserialze(parser);
} catch (Exception ex) {
throw new JSONException("parseDecimal error, field : " + fieldName, ex);
}
}
@SuppressWarnings("unchecked")
public static <T> T deserialze(DefaultJSONParser parser) {
final JSONLexer lexer = parser.lexer;
if (lexer.token() == JSONToken.LITERAL_INT) {
BigDecimal decimalValue = lexer.decimalValue();
lexer.nextToken(JSONToken.COMMA);
return (T) decimalValue;
}
if (lexer.token() == JSONToken.LITERAL_FLOAT) {
BigDecimal val = lexer.decimalValue();
lexer.nextToken(JSONToken.COMMA);
return (T) val;
}
Object value = parser.parse();
return value == null //
? null //
: (T) TypeUtils.castToBigDecimal(value);
}
public int getFastMatchToken() {
return JSONToken.LITERAL_INT;
}
}
| BigDecimalCodec |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytomany/ManyToManySQLJoinTableRestrictionTest.java | {
"start": 6797,
"end": 7081
} | class ____ {
@Id
@Column( name = "project_id" )
private String projectId;
@Id
@Column( name = "user_id" )
private String userId;
@Id
@Column( name = "role_name" )
private String role;
}
@Entity( name = "User" )
@Table( name = "t_user" )
public static | ProjectUsers |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2TokenRevocationAuthenticationToken.java | {
"start": 1289,
"end": 3306
} | class ____ extends AbstractAuthenticationToken {
@Serial
private static final long serialVersionUID = -880609099230203249L;
private final String token;
private final Authentication clientPrincipal;
private final String tokenTypeHint;
/**
* Constructs an {@code OAuth2TokenRevocationAuthenticationToken} using the provided
* parameters.
* @param token the token
* @param clientPrincipal the authenticated client principal
* @param tokenTypeHint the token type hint
*/
public OAuth2TokenRevocationAuthenticationToken(String token, Authentication clientPrincipal,
@Nullable String tokenTypeHint) {
super(Collections.emptyList());
Assert.hasText(token, "token cannot be empty");
Assert.notNull(clientPrincipal, "clientPrincipal cannot be null");
this.token = token;
this.clientPrincipal = clientPrincipal;
this.tokenTypeHint = tokenTypeHint;
}
/**
* Constructs an {@code OAuth2TokenRevocationAuthenticationToken} using the provided
* parameters.
* @param revokedToken the revoked token
* @param clientPrincipal the authenticated client principal
*/
public OAuth2TokenRevocationAuthenticationToken(OAuth2Token revokedToken, Authentication clientPrincipal) {
super(Collections.emptyList());
Assert.notNull(revokedToken, "revokedToken cannot be null");
Assert.notNull(clientPrincipal, "clientPrincipal cannot be null");
this.token = revokedToken.getTokenValue();
this.clientPrincipal = clientPrincipal;
this.tokenTypeHint = null;
setAuthenticated(true); // Indicates that the token was authenticated and revoked
}
@Override
public Object getPrincipal() {
return this.clientPrincipal;
}
@Override
public Object getCredentials() {
return "";
}
/**
* Returns the token.
* @return the token
*/
public String getToken() {
return this.token;
}
/**
* Returns the token type hint.
* @return the token type hint
*/
@Nullable
public String getTokenTypeHint() {
return this.tokenTypeHint;
}
}
| OAuth2TokenRevocationAuthenticationToken |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java | {
"start": 36388,
"end": 36882
} | class ____ {
void foo() {
makeBarOrThrow();
}
@CheckReturnValue
String makeBarOrThrow() {
throw new UnsupportedOperationException();
}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.CheckReturnValue;
| Test |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/assertj/AbstractMockHttpServletResponseAssertTests.java | {
"start": 1303,
"end": 4958
} | class ____ {
@Test
void bodyText() {
MockHttpServletResponse response = createResponse("OK");
assertThat(fromResponse(response)).bodyText().isEqualTo("OK");
}
@Test
void bodyJsonWithJsonPath() {
MockHttpServletResponse response = createResponse("{\"albumById\": {\"name\": \"Greatest hits\"}}");
assertThat(fromResponse(response)).bodyJson()
.extractingPath("$.albumById.name").isEqualTo("Greatest hits");
}
@Test
void bodyJsonCanLoadResourceRelativeToClass() {
MockHttpServletResponse response = createResponse("{ \"name\" : \"Spring\", \"age\" : 123 }");
// See org/springframework/test/json/example.json
assertThat(fromResponse(response)).bodyJson().withResourceLoadClass(JsonContent.class)
.isLenientlyEqualTo("example.json");
}
@Test
void bodyWithByteArray() throws UnsupportedEncodingException {
byte[] bytes = "OK".getBytes(StandardCharsets.UTF_8);
MockHttpServletResponse response = new MockHttpServletResponse();
response.getWriter().write("OK");
response.setContentType(StandardCharsets.UTF_8.name());
assertThat(fromResponse(response)).body().isEqualTo(bytes);
}
@Test
void hasBodyTextEqualTo() throws UnsupportedEncodingException {
MockHttpServletResponse response = new MockHttpServletResponse();
response.getWriter().write("OK");
response.setContentType(StandardCharsets.UTF_8.name());
assertThat(fromResponse(response)).hasBodyTextEqualTo("OK");
}
@Test
void hasForwardedUrl() {
String forwardedUrl = "https://example.com/42";
MockHttpServletResponse response = new MockHttpServletResponse();
response.setForwardedUrl(forwardedUrl);
assertThat(fromResponse(response)).hasForwardedUrl(forwardedUrl);
}
@Test
void hasForwardedUrlWithWrongValue() {
String forwardedUrl = "https://example.com/42";
MockHttpServletResponse response = new MockHttpServletResponse();
response.setForwardedUrl(forwardedUrl);
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(fromResponse(response)).hasForwardedUrl("another"))
.withMessageContainingAll("Forwarded URL", forwardedUrl, "another");
}
@Test
void hasRedirectedUrl() {
String redirectedUrl = "https://example.com/42";
MockHttpServletResponse response = new MockHttpServletResponse();
response.addHeader(HttpHeaders.LOCATION, redirectedUrl);
assertThat(fromResponse(response)).hasRedirectedUrl(redirectedUrl);
}
@Test
void hasRedirectedUrlWithWrongValue() {
String redirectedUrl = "https://example.com/42";
MockHttpServletResponse response = new MockHttpServletResponse();
response.addHeader(HttpHeaders.LOCATION, redirectedUrl);
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(fromResponse(response)).hasRedirectedUrl("another"))
.withMessageContainingAll("Redirected URL", redirectedUrl, "another");
}
@Test
void hasServletErrorMessage() throws Exception{
MockHttpServletResponse response = new MockHttpServletResponse();
response.sendError(403, "expected error message");
assertThat(fromResponse(response)).hasErrorMessage("expected error message");
}
private MockHttpServletResponse createResponse(String body) {
try {
MockHttpServletResponse response = new MockHttpServletResponse();
response.setContentType(StandardCharsets.UTF_8.name());
response.getWriter().write(body);
return response;
}
catch (UnsupportedEncodingException ex) {
throw new IllegalStateException(ex);
}
}
private static AssertProvider<ResponseAssert> fromResponse(MockHttpServletResponse response) {
return () -> new ResponseAssert(response);
}
private static final | AbstractMockHttpServletResponseAssertTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java | {
"start": 20616,
"end": 21427
} | class ____ {
private static final MethodHandle NEW_CRC32C_MH;
static {
MethodHandle newCRC32C = null;
try {
newCRC32C = MethodHandles.publicLookup()
.findConstructor(
Class.forName("java.util.zip.CRC32C"),
MethodType.methodType(void.class)
);
} catch (ReflectiveOperationException e) {
// Should not reach here.
throw new RuntimeException(e);
}
NEW_CRC32C_MH = newCRC32C;
}
public static Checksum createChecksum() {
try {
// Should throw nothing
return (Checksum) NEW_CRC32C_MH.invoke();
} catch (Throwable t) {
throw (t instanceof RuntimeException) ? (RuntimeException) t
: new RuntimeException(t);
}
}
};
}
| Java9Crc32CFactory |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/sbom/SbomEndpointWebIntegrationTests.java | {
"start": 1249,
"end": 1741
} | class ____ {
@WebEndpointTest
void shouldReturnSboms(WebTestClient client) {
client.get()
.uri("/actuator/sbom")
.exchange()
.expectStatus()
.isOk()
.expectHeader()
.contentType(MediaType.parseMediaType("application/vnd.spring-boot.actuator.v3+json"))
.expectBody()
.jsonPath("$.ids")
.value((value) -> assertThat(value).isEqualTo(new JSONArray().appendElement("application")));
}
@Configuration(proxyBeanMethods = false)
static | SbomEndpointWebIntegrationTests |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/Column.java | {
"start": 6202,
"end": 7413
} | class ____ extends Column {
private PhysicalColumn(String name, DataType dataType) {
this(name, dataType, null);
}
private PhysicalColumn(String name, DataType dataType, String comment) {
super(name, dataType, comment);
}
@Override
public PhysicalColumn withComment(String comment) {
if (comment == null) {
return this;
}
return new PhysicalColumn(name, dataType, comment);
}
@Override
public boolean isPhysical() {
return true;
}
@Override
public boolean isPersisted() {
return true;
}
@Override
public Optional<String> explainExtras() {
return Optional.empty();
}
@Override
public Column copy(DataType newDataType) {
return new PhysicalColumn(name, newDataType, comment);
}
@Override
public Column rename(String newName) {
return new PhysicalColumn(newName, dataType, comment);
}
}
/** Representation of a computed column. */
@PublicEvolving
public static final | PhysicalColumn |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java | {
"start": 2688,
"end": 16816
} | class ____ extends ESIntegTestCase {
@Override
protected int numberOfReplicas() {
return 2;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singletonList(MockTransportService.TestPlugin.class);
}
public void testNoMasterActions() throws Exception {
Settings settings = Settings.builder()
.put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true)
.put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "all")
.build();
final TimeValue timeout = TimeValue.timeValueMillis(10);
final List<String> nodes = internalCluster().startNodes(3, settings);
createIndex("test");
clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, "test").setWaitForGreenStatus().get();
final NetworkDisruption disruptionScheme = new NetworkDisruption(
new IsolateAllNodes(new HashSet<>(nodes)),
NetworkDisruption.DISCONNECT
);
internalCluster().setDisruptionScheme(disruptionScheme);
disruptionScheme.startDisrupting();
final String masterlessNode = internalCluster().getRandomNodeName();
final Client clientToMasterlessNode = client(masterlessNode);
awaitClusterState(masterlessNode, state -> state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
assertRequestBuilderThrows(
clientToMasterlessNode.prepareGet("test", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.prepareGet("no_index", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.prepareMultiGet().add("test", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.prepareMultiGet().add("no_index", "1"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.admin().indices().prepareAnalyze("test", "this is a test"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.admin().indices().prepareAnalyze("no_index", "this is a test"),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.prepareSearch("test").setSize(0),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
assertRequestBuilderThrows(
clientToMasterlessNode.prepareSearch("no_index").setSize(0),
ClusterBlockException.class,
RestStatus.SERVICE_UNAVAILABLE
);
checkUpdateAction(
false,
timeout,
clientToMasterlessNode.prepareUpdate("test", "1")
.setScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap()))
.setTimeout(timeout)
);
checkUpdateAction(
true,
timeout,
clientToMasterlessNode.prepareUpdate("no_index", "1")
.setScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", Collections.emptyMap()))
.setTimeout(timeout)
);
checkWriteAction(
clientToMasterlessNode.prepareIndex("test")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().endObject())
.setTimeout(timeout)
);
checkWriteAction(
clientToMasterlessNode.prepareIndex("no_index")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().endObject())
.setTimeout(timeout)
);
BulkRequestBuilder bulkRequestBuilder = clientToMasterlessNode.prepareBulk();
bulkRequestBuilder.add(
clientToMasterlessNode.prepareIndex("test").setId("1").setSource(XContentFactory.jsonBuilder().startObject().endObject())
);
bulkRequestBuilder.add(
clientToMasterlessNode.prepareIndex("test").setId("2").setSource(XContentFactory.jsonBuilder().startObject().endObject())
);
bulkRequestBuilder.setTimeout(timeout);
checkWriteAction(bulkRequestBuilder);
bulkRequestBuilder = clientToMasterlessNode.prepareBulk();
bulkRequestBuilder.add(
clientToMasterlessNode.prepareIndex("no_index").setId("1").setSource(XContentFactory.jsonBuilder().startObject().endObject())
);
bulkRequestBuilder.add(
clientToMasterlessNode.prepareIndex("no_index").setId("2").setSource(XContentFactory.jsonBuilder().startObject().endObject())
);
bulkRequestBuilder.setTimeout(timeout);
checkWriteAction(bulkRequestBuilder);
internalCluster().clearDisruptionScheme(true);
}
void checkUpdateAction(boolean autoCreateIndex, TimeValue timeout, RequestBuilder<?, ?> builder) {
// we clean the metadata when loosing a master, therefore all operations on indices will auto create it, if allowed
try {
builder.get();
fail("expected ClusterBlockException or MasterNotDiscoveredException");
} catch (ClusterBlockException | MasterNotDiscoveredException e) {
if (e instanceof MasterNotDiscoveredException) {
assertTrue(autoCreateIndex);
} else {
assertFalse(autoCreateIndex);
}
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
}
void checkWriteAction(RequestBuilder<?, ?> builder) {
try {
builder.get();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
}
public void testNoMasterActionsWriteMasterBlock() throws Exception {
Settings settings = Settings.builder()
.put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false)
.put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "write")
.build();
final List<String> nodes = internalCluster().startNodes(3, settings);
prepareCreate("test1").setSettings(indexSettings(1, 2)).get();
prepareCreate("test2").setSettings(indexSettings(3, 0)).get();
clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, "_all").setWaitForGreenStatus().get();
prepareIndex("test1").setId("1").setSource("field", "value1").get();
prepareIndex("test2").setId("1").setSource("field", "value1").get();
refresh();
ensureSearchable("test1", "test2");
ClusterStateResponse clusterState = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get();
logger.info("Cluster state:\n{}", clusterState.getState());
final NetworkDisruption disruptionScheme = new NetworkDisruption(
new IsolateAllNodes(new HashSet<>(nodes)),
NetworkDisruption.DISCONNECT
);
internalCluster().setDisruptionScheme(disruptionScheme);
disruptionScheme.startDisrupting();
final String masterlessNode = internalCluster().getRandomNodeName();
final Client clientToMasterlessNode = client(masterlessNode);
awaitClusterState(masterlessNode, state -> state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "1").get();
assertExists(getResponse);
assertHitCount(
1L,
clientToMasterlessNode.prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0),
clientToMasterlessNode.prepareSearch("test1").setAllowPartialSearchResults(true)
);
assertResponse(clientToMasterlessNode.prepareSearch("test2").setAllowPartialSearchResults(true).setSize(0), countResponse -> {
assertThat(countResponse.getTotalShards(), equalTo(3));
assertThat(countResponse.getSuccessfulShards(), equalTo(1));
});
TimeValue timeout = TimeValue.timeValueMillis(200);
long now = System.currentTimeMillis();
try {
clientToMasterlessNode.prepareUpdate("test1", "1")
.setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2")
.setTimeout(timeout)
.get();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50));
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
} catch (Exception e) {
logger.info("unexpected", e);
throw e;
}
try {
clientToMasterlessNode.prepareIndex("test1")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().endObject())
.setTimeout(timeout)
.get();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
internalCluster().clearDisruptionScheme(true);
}
public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception {
Settings settings = Settings.builder()
.put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "metadata_write")
.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.getKey(), "100ms")
.build();
final List<String> nodes = internalCluster().startNodes(3, settings);
prepareCreate("test1").setSettings(indexSettings(1, 1)).get();
clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, "_all").setWaitForGreenStatus().get();
prepareIndex("test1").setId("1").setSource("field", "value1").get();
refresh();
ensureGreen("test1");
ClusterStateResponse clusterState = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get();
logger.info("Cluster state:\n{}", clusterState.getState());
final List<String> nodesWithShards = clusterState.getState()
.routingTable()
.index("test1")
.shard(0)
.activeShards()
.stream()
.map(shardRouting -> shardRouting.currentNodeId())
.map(nodeId -> clusterState.getState().nodes().resolveNode(nodeId))
.map(DiscoveryNode::getName)
.toList();
client().execute(
TransportAddVotingConfigExclusionsAction.TYPE,
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, nodesWithShards.toArray(new String[0]))
).get();
ensureGreen("test1");
String partitionedNode = nodes.stream().filter(n -> nodesWithShards.contains(n) == false).findFirst().get();
final NetworkDisruption disruptionScheme = new NetworkDisruption(
new NetworkDisruption.TwoPartitions(Collections.singleton(partitionedNode), new HashSet<>(nodesWithShards)),
NetworkDisruption.DISCONNECT
);
internalCluster().setDisruptionScheme(disruptionScheme);
disruptionScheme.startDisrupting();
for (String node : nodesWithShards) {
awaitClusterState(node, state -> state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
}
GetResponse getResponse = client(randomFrom(nodesWithShards)).prepareGet("test1", "1").get();
assertExists(getResponse);
expectThrows(Exception.class, client(partitionedNode).prepareGet("test1", "1"));
assertHitCount(client(randomFrom(nodesWithShards)).prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0), 1L);
expectThrows(Exception.class, client(partitionedNode).prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0));
TimeValue timeout = TimeValue.timeValueMillis(200);
client(randomFrom(nodesWithShards)).prepareUpdate("test1", "1")
.setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2")
.setTimeout(timeout)
.get();
expectThrows(
Exception.class,
client(partitionedNode).prepareUpdate("test1", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").setTimeout(timeout)
);
client(randomFrom(nodesWithShards)).prepareIndex("test1")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().endObject())
.setTimeout(timeout)
.get();
// dynamic mapping updates fail
expectThrows(
MasterNotDiscoveredException.class,
client(randomFrom(nodesWithShards)).prepareIndex("test1")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().field("new_field", "value").endObject())
.setTimeout(timeout)
);
// dynamic index creation fails
expectThrows(
MasterNotDiscoveredException.class,
client(randomFrom(nodesWithShards)).prepareIndex("test2")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().endObject())
.setTimeout(timeout)
);
expectThrows(
Exception.class,
client(partitionedNode).prepareIndex("test1")
.setId("1")
.setSource(XContentFactory.jsonBuilder().startObject().endObject())
.setTimeout(timeout)
);
internalCluster().clearDisruptionScheme(true);
}
}
| NoMasterNodeIT |
java | elastic__elasticsearch | x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtWithOidcAuthIT.java | {
"start": 1867,
"end": 9353
} | class ____ extends C2IdOpTestCase {
// configured in the Elasticsearch node test fixture
private static final List<String> ALLOWED_AUDIENCES = List.of("elasticsearch-jwt1", "elasticsearch-jwt2");
private static final String JWT_FILE_REALM_NAME = "op-jwt";
private static final String JWT_PROXY_REALM_NAME = "op-jwt-proxy";
// Constants for role mapping
private static final String FILE_ROLE_NAME = "jwt_role";
private static final String FILE_SHARED_SECRET = "jwt-realm-shared-secret";
private static final String PROXY_ROLE_NAME = "jwt_proxy_role";
private static final String PROXY_SHARED_SECRET = "jwt-proxy-realm-shared-secret";
// Randomised values
private static String clientId;
private static String redirectUri;
/**
* Register an OIDC client so we can generate a JWT in C2id (which only supports dynamic configuration).
*/
@BeforeClass
public static void registerClient() throws IOException {
clientId = randomFrom(ALLOWED_AUDIENCES);
redirectUri = "https://" + randomAlphaOfLength(4) + ".rp.example.com/" + randomAlphaOfLength(6);
String clientSecret = randomAlphaOfLength(24);
String clientSetup = Strings.format("""
{
"grant_types": [ "implicit" ],
"response_types": [ "token id_token" ],
"preferred_client_id": "%s",
"preferred_client_secret": "%s",
"redirect_uris": [ "%s" ]
}""", clientId, clientSecret, redirectUri);
registerClients(clientSetup);
}
@Before
public void setupRoleMappings() throws Exception {
try (var restClient = getElasticsearchClient()) {
var client = new TestSecurityClient(restClient);
String mappingJson = Strings.format("""
{
"roles": [ "%s" ],
"enabled": true,
"rules": {
"all": [
{ "field": { "realm.name": "%s" } },
{ "field": { "metadata.jwt_claim_sub": "%s" } }
]
}
}
""", FILE_ROLE_NAME, JWT_FILE_REALM_NAME, TEST_SUBJECT_ID);
client.putRoleMapping(FILE_ROLE_NAME, mappingJson);
mappingJson = Strings.format("""
{
"roles": [ "%s" ],
"enabled": true,
"rules": {
"all": [
{ "field": { "realm.name": "%s" } },
{ "field": { "metadata.jwt_claim_sub": "%s" } }
]
}
}
""", PROXY_ROLE_NAME, JWT_PROXY_REALM_NAME, TEST_SUBJECT_ID);
client.putRoleMapping(PROXY_ROLE_NAME, mappingJson);
}
}
    /**
     * End-to-end test: obtain a real OIDC id_token from the c2id OP via the implicit flow,
     * then authenticate against the JWT realm with it.
     * Verifies:
     * <ul>
     *   <li>authentication with {@code FILE_SHARED_SECRET} succeeds and maps to {@code FILE_ROLE_NAME}</li>
     *   <li>the proxy realm (using {@code PROXY_SHARED_SECRET}) also authenticates and maps to {@code PROXY_ROLE_NAME},
     *       which implies it successfully loaded the JWKS</li>
     *   <li>a wrong shared secret is rejected with 401</li>
     *   <li>a JWT with a tampered payload is rejected with 401</li>
     * </ul>
     */
    public void testAuthenticateWithOidcIssuedJwt() throws Exception {
        final String state = randomAlphaOfLength(42);
        final String nonce = randomAlphaOfLength(42);
        // Implicit flow: response_type "id_token token" makes the OP return the JWT directly in the redirect URI
        final AuthenticationRequest oidcAuthRequest = new AuthenticationRequest.Builder(
            new ResponseType("id_token", "token"),
            new Scope(OIDCScopeValue.OPENID),
            new ClientID(clientId),
            new URI(redirectUri)
        ).endpointURI(new URI(c2id.getC2OPUrl() + "/c2id-login")).state(new State(state)).nonce(new Nonce(nonce)).build();
        final String implicitFlowURI = authenticateAtOP(oidcAuthRequest.toURI());
        assertThat("Hash value of URI should be a JWT", implicitFlowURI, Matchers.containsString("#"));
        /*
         * In OIDC's implicit flow, the JWT is provided in the URI as a hash fragment using form encoding
         * (See Section 4.2.2 of the OAuth2 spec - https://www.rfc-editor.org/rfc/rfc6749.html#section-4.2.2)
         * We're not trying to do OIDC - we're just trying to get an id_token shaped JWT from a real OIDC OP server (c2id) and use it
         * to authenticated against our JWT realm.
         * So, we extract the hash fragment, and decode it as a query string (which is not quite form encoding, but does the job).
         * The three-part-encoded JWT id_token will be in the "id_token" field
         */
        final int hashChar = implicitFlowURI.indexOf('#');
        final Map<String, String> hashParams = new HashMap<>();
        RestUtils.decodeQueryString(implicitFlowURI.substring(hashChar + 1), 0, hashParams);
        assertThat("Hash value of URI [" + implicitFlowURI + "] should be a JWT with an id Token", hashParams, hasKey("id_token"));
        String idJwt = hashParams.get("id_token");
        final Map<String, Object> authenticateResponse = authenticateWithJwtAndSharedSecret(idJwt, FILE_SHARED_SECRET);
        assertThat(authenticateResponse, Matchers.hasEntry(User.Fields.USERNAME.getPreferredName(), TEST_SUBJECT_ID));
        assertThat(authenticateResponse, Matchers.hasKey(User.Fields.ROLES.getPreferredName()));
        assertThat((List<?>) authenticateResponse.get(User.Fields.ROLES.getPreferredName()), contains(FILE_ROLE_NAME));
        // test that the proxy realm successfully loads the JWKS
        final Map<String, Object> proxyAuthenticateResponse = authenticateWithJwtAndSharedSecret(idJwt, PROXY_SHARED_SECRET);
        assertThat(proxyAuthenticateResponse, Matchers.hasEntry(User.Fields.USERNAME.getPreferredName(), TEST_SUBJECT_ID));
        assertThat(proxyAuthenticateResponse, Matchers.hasKey(User.Fields.ROLES.getPreferredName()));
        assertThat((List<?>) proxyAuthenticateResponse.get(User.Fields.ROLES.getPreferredName()), contains(PROXY_ROLE_NAME));
        // Use an incorrect shared secret and check it fails
        ResponseException ex = expectThrows(
            ResponseException.class,
            () -> authenticateWithJwtAndSharedSecret(idJwt, "not-" + FILE_SHARED_SECRET)
        );
        assertThat(ex.getResponse(), TestMatchers.hasStatusCode(RestStatus.UNAUTHORIZED));
        // Modify the JWT payload and check it fails
        final int dot = idJwt.indexOf('.');
        assertThat(dot, greaterThan(0));
        // change the first character of the payload section of the encoded JWT
        final String corruptToken = idJwt.substring(0, dot) + "." + transformChar(idJwt.charAt(dot + 1)) + idJwt.substring(dot + 2);
        ex = expectThrows(ResponseException.class, () -> authenticateWithJwtAndSharedSecret(corruptToken, FILE_SHARED_SECRET));
        assertThat(ex.getResponse(), TestMatchers.hasStatusCode(RestStatus.UNAUTHORIZED));
    }
private Map<String, Object> authenticateWithJwtAndSharedSecret(String idJwt, String sharedSecret) throws IOException {
final Map<String, Object> authenticateResponse = super.callAuthenticateApiUsingBearerToken(
idJwt,
RequestOptions.DEFAULT.toBuilder()
.addHeader(
JwtRealm.HEADER_CLIENT_AUTHENTICATION,
JwtRealmSettings.HEADER_SHARED_SECRET_AUTHENTICATION_SCHEME + " " + sharedSecret
)
.build()
);
return authenticateResponse;
}
private char transformChar(char c) {
if (Character.isLowerCase(c)) {
return Character.toUpperCase(c);
}
if (Character.isUpperCase(c)) {
return Character.toLowerCase(c);
}
// For anything non-alphabetic we can just return a random alpha char
return randomAlphaOfLength(1).charAt(0);
}
}
| JwtWithOidcAuthIT |
java | elastic__elasticsearch | x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/type/DataTypeConversionTests.java | {
"start": 1886,
"end": 30260
} | class ____ extends ESTestCase {
    /**
     * Conversions targeting {@code KEYWORD}: doubles render via their canonical string
     * form ("10.0"), unsigned longs via {@code BigInteger#toString}, and datetimes as
     * ISO-8601 UTC strings (nanosecond precision preserved).
     */
    public void testConversionToString() {
        DataType to = KEYWORD;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals("10.0", conversion.convert(10.0));
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = randomBigInteger();
            assertEquals(bi.toString(), conversion.convert(bi));
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals("1973-11-29T21:33:09.101Z", conversion.convert(asDateTime(123456789101L)));
            // negative epoch millis (pre-1970) also render correctly
            assertEquals("1966-02-02T02:26:50.899Z", conversion.convert(asDateTime(-123456789101L)));
            assertEquals("2020-05-01T10:20:30.123456789Z", conversion.convert(DateUtils.asDateTime("2020-05-01T10:20:30.123456789Z")));
        }
    }
    /**
     * Test conversion to long.
     * Doubles are rounded (10.1 -> 10, 10.6 -> 11); out-of-range doubles and unsigned
     * longs, as well as unparsable strings, raise InvalidArgumentException; booleans
     * map to 1/0; datetimes convert to their epoch-milli value (nanos dropped).
     */
    public void testConversionToLong() {
        DataType to = LONG;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals(10L, conversion.convert(10.0));
            assertEquals(10L, conversion.convert(10.1));
            assertEquals(11L, conversion.convert(10.6));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE));
            assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = BigInteger.valueOf(randomNonNegativeLong());
            assertEquals(bi.longValue(), conversion.convert(bi));
            // pushing the value past Long.MAX_VALUE must be rejected
            BigInteger longPlus = bi.add(BigInteger.valueOf(Long.MAX_VALUE));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(longPlus));
            assertEquals("[" + longPlus + "] out of [long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(INTEGER, to);
            assertNull(conversion.convert(null));
            assertEquals(10L, conversion.convert(10));
            assertEquals(-134L, conversion.convert(-134));
        }
        {
            Converter conversion = converterFor(BOOLEAN, to);
            assertNull(conversion.convert(null));
            assertEquals(1L, conversion.convert(true));
            assertEquals(0L, conversion.convert(false));
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals(123456789101L, conversion.convert(asDateTime(123456789101L)));
            assertEquals(-123456789101L, conversion.convert(asDateTime(-123456789101L)));
            // Nanos are ignored, only millis are used
            assertEquals(1588328430123L, conversion.convert(DateUtils.asDateTime("2020-05-01T10:20:30.123456789Z")));
        }
        {
            Converter conversion = converterFor(KEYWORD, to);
            assertNull(conversion.convert(null));
            assertEquals(1L, conversion.convert("1"));
            assertEquals(0L, conversion.convert("-0"));
            // hex notation is not supported for string-to-long
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff"));
            assertEquals("cannot cast [0xff] to [long]", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code DATETIME}: numeric inputs are interpreted as epoch
     * millis (doubles rounded, values past the long range rejected), booleans map to
     * epoch 1/0, and strings are parsed as ISO-8601 dates or date-times, with either
     * 'T' or space separator, optional fraction, offset, and extended years.
     */
    public void testConversionToDateTime() {
        DataType to = DATETIME;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals(asDateTime(10L), conversion.convert(10.0));
            assertEquals(asDateTime(10L), conversion.convert(10.1));
            assertEquals(asDateTime(11L), conversion.convert(10.6));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Double.MAX_VALUE));
            assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = BigInteger.valueOf(randomNonNegativeLong());
            assertEquals(asDateTime(bi.longValue()), conversion.convert(bi));
            BigInteger longPlus = bi.add(BigInteger.valueOf(Long.MAX_VALUE));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(longPlus));
            assertEquals("[" + longPlus + "] out of [long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(INTEGER, to);
            assertNull(conversion.convert(null));
            assertEquals(asDateTime(10L), conversion.convert(10));
            assertEquals(asDateTime(-134L), conversion.convert(-134));
        }
        {
            Converter conversion = converterFor(BOOLEAN, to);
            assertNull(conversion.convert(null));
            assertEquals(asDateTime(1), conversion.convert(true));
            assertEquals(asDateTime(0), conversion.convert(false));
        }
        {
            Converter conversion = converterFor(KEYWORD, to);
            assertNull(conversion.convert(null));
            // date-only strings resolve to midnight UTC
            assertEquals(asDateTime(0L), conversion.convert("1970-01-01"));
            assertEquals(asDateTime(1000L), conversion.convert("1970-01-01T00:00:01Z"));
            assertEquals(asDateTime(1483228800000L), conversion.convert("2017-01-01T00:00:00Z"));
            assertEquals(asDateTime(1483228800000L), conversion.convert("2017-01-01 00:00:00Z"));
            assertEquals(asDateTime(1483228800123L), conversion.convert("2017-01-01T00:00:00.123Z"));
            assertEquals(asDateTime(1483228800123L), conversion.convert("2017-01-01 00:00:00.123Z"));
            // zone offsets are honored
            assertEquals(asDateTime(18000321L), conversion.convert("1970-01-01T00:00:00.321-05:00"));
            assertEquals(asDateTime(18000321L), conversion.convert("1970-01-01 00:00:00.321-05:00"));
            // extended (5+ digit, signed) years
            assertEquals(asDateTime(3849948162000321L), conversion.convert("+123970-01-01T00:00:00.321-05:00"));
            assertEquals(asDateTime(3849948162000321L), conversion.convert("+123970-01-01 00:00:00.321-05:00"));
            assertEquals(asDateTime(-818587277999679L), conversion.convert("-23970-01-01T00:00:00.321-05:00"));
            assertEquals(asDateTime(-818587277999679L), conversion.convert("-23970-01-01 00:00:00.321-05:00"));
            // double check back and forth conversion
            ZonedDateTime dt = org.elasticsearch.common.time.DateUtils.nowWithMillisResolution();
            Converter forward = converterFor(DATETIME, KEYWORD);
            Converter back = converterFor(KEYWORD, DATETIME);
            assertEquals(dt, back.convert(forward.convert(dt)));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff"));
            assertEquals("cannot cast [0xff] to [datetime]: Text '0xff' could not be parsed at index 0", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code FLOAT}: numeric inputs narrow to float, booleans
     * map to 1.0f/0.0f, datetimes use their epoch-milli value, numeric strings are
     * parsed and unparsable strings are rejected.
     */
    public void testConversionToFloat() {
        DataType to = FLOAT;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals(10.0f, (float) conversion.convert(10.0d), 0.00001);
            assertEquals(10.1f, (float) conversion.convert(10.1d), 0.00001);
            assertEquals(10.6f, (float) conversion.convert(10.6d), 0.00001);
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = randomBigInteger();
            assertEquals(bi.floatValue(), (float) conversion.convert(bi), 0);
        }
        {
            Converter conversion = converterFor(INTEGER, to);
            assertNull(conversion.convert(null));
            assertEquals(10.0f, (float) conversion.convert(10), 0.00001);
            assertEquals(-134.0f, (float) conversion.convert(-134), 0.00001);
        }
        {
            Converter conversion = converterFor(BOOLEAN, to);
            assertNull(conversion.convert(null));
            assertEquals(1.0f, (float) conversion.convert(true), 0);
            assertEquals(0.0f, (float) conversion.convert(false), 0);
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals(1.23456789101E11f, (float) conversion.convert(asDateTime(123456789101L)), 0);
            assertEquals(-1.23456789101E11f, (float) conversion.convert(asDateTime(-123456789101L)), 0);
            // Nanos are ignored, only millis are used
            assertEquals(1.5883284E12f, conversion.convert(DateUtils.asDateTime("2020-05-01T10:20:30.123456789Z")));
        }
        {
            Converter conversion = converterFor(KEYWORD, to);
            assertNull(conversion.convert(null));
            assertEquals(1.0f, (float) conversion.convert("1"), 0);
            assertEquals(0.0f, (float) conversion.convert("-0"), 0);
            assertEquals(12.776f, (float) conversion.convert("12.776"), 0.00001);
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff"));
            assertEquals("cannot cast [0xff] to [float]", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code DOUBLE}: numeric inputs widen to double, booleans
     * map to 1.0/0.0, datetimes use their epoch-milli value, numeric strings are
     * parsed and unparsable strings are rejected.
     */
    public void testConversionToDouble() {
        DataType to = DOUBLE;
        {
            Converter conversion = converterFor(FLOAT, to);
            assertNull(conversion.convert(null));
            assertEquals(10.0, (double) conversion.convert(10.0f), 0.00001);
            assertEquals(10.1, (double) conversion.convert(10.1f), 0.00001);
            assertEquals(10.6, (double) conversion.convert(10.6f), 0.00001);
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = randomBigInteger();
            assertEquals(bi.doubleValue(), (double) conversion.convert(bi), 0);
        }
        {
            Converter conversion = converterFor(INTEGER, to);
            assertNull(conversion.convert(null));
            assertEquals(10.0, (double) conversion.convert(10), 0.00001);
            assertEquals(-134.0, (double) conversion.convert(-134), 0.00001);
        }
        {
            Converter conversion = converterFor(BOOLEAN, to);
            assertNull(conversion.convert(null));
            assertEquals(1.0, (double) conversion.convert(true), 0);
            assertEquals(0.0, (double) conversion.convert(false), 0);
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals(1.23456789101E11, (double) conversion.convert(asDateTime(123456789101L)), 0);
            assertEquals(-1.23456789101E11, (double) conversion.convert(asDateTime(-123456789101L)), 0);
            // Nanos are ignored, only millis are used
            assertEquals(1.588328430123E12, conversion.convert(DateUtils.asDateTime("2020-05-01T10:20:30.123456789Z")));
        }
        {
            Converter conversion = converterFor(KEYWORD, to);
            assertNull(conversion.convert(null));
            assertEquals(1.0, (double) conversion.convert("1"), 0);
            assertEquals(0.0, (double) conversion.convert("-0"), 0);
            assertEquals(12.776, (double) conversion.convert("12.776"), 0.00001);
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0xff"));
            assertEquals("cannot cast [0xff] to [double]", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code BOOLEAN}: any non-zero number (including negative)
     * and any non-epoch datetime is {@code true}, zero is {@code false}; for strings,
     * only case-insensitive "true"/"false" are accepted — everything else (including
     * "0"/"1"/"yes"/"no") raises InvalidArgumentException.
     */
    public void testConversionToBoolean() {
        DataType to = BOOLEAN;
        {
            Converter conversion = converterFor(FLOAT, to);
            assertNull(conversion.convert(null));
            assertEquals(true, conversion.convert(10.0f));
            assertEquals(true, conversion.convert(-10.0f));
            assertEquals(false, conversion.convert(0.0f));
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            assertEquals(true, conversion.convert(BigInteger.valueOf(randomNonNegativeLong())));
            assertEquals(false, conversion.convert(BigInteger.ZERO));
        }
        {
            Converter conversion = converterFor(INTEGER, to);
            assertNull(conversion.convert(null));
            assertEquals(true, conversion.convert(10));
            assertEquals(true, conversion.convert(-10));
            assertEquals(false, conversion.convert(0));
        }
        {
            Converter conversion = converterFor(LONG, to);
            assertNull(conversion.convert(null));
            assertEquals(true, conversion.convert(10L));
            assertEquals(true, conversion.convert(-10L));
            assertEquals(false, conversion.convert(0L));
        }
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals(true, conversion.convert(10.0d));
            assertEquals(true, conversion.convert(-10.0d));
            assertEquals(false, conversion.convert(0.0d));
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals(true, conversion.convert(asDateTime(123456789101L)));
            assertEquals(true, conversion.convert(asDateTime(-123456789101L)));
            // the epoch itself is falsy
            assertEquals(false, conversion.convert(asDateTime(0L)));
        }
        {
            Converter conversion = converterFor(KEYWORD, to);
            assertNull(conversion.convert(null));
            // We only handled upper and lower case true and false
            assertEquals(true, conversion.convert("true"));
            assertEquals(false, conversion.convert("false"));
            assertEquals(true, conversion.convert("True"));
            assertEquals(false, conversion.convert("fAlSe"));
            // Everything else should fail
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10"));
            assertEquals("cannot cast [10] to [boolean]", e.getMessage());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("-1"));
            assertEquals("cannot cast [-1] to [boolean]", e.getMessage());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("0"));
            assertEquals("cannot cast [0] to [boolean]", e.getMessage());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("blah"));
            assertEquals("cannot cast [blah] to [boolean]", e.getMessage());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("Yes"));
            assertEquals("cannot cast [Yes] to [boolean]", e.getMessage());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("nO"));
            assertEquals("cannot cast [nO] to [boolean]", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code UNSIGNED_LONG}: negative inputs and values above
     * {@code UNSIGNED_LONG_MAX} are rejected, datetimes use their epoch-milli value
     * (which must be non-negative), booleans map to ONE/ZERO, and numeric strings may
     * carry a fractional part, which is truncated.
     */
    public void testConversionToUnsignedLong() {
        DataType to = UNSIGNED_LONG;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            double d = Math.abs(randomDouble());
            assertEquals(BigDecimal.valueOf(d).toBigInteger(), conversion.convert(d));
            // UNSIGNED_LONG_MAX as a double rounds past the max and must be rejected
            Double ulmAsDouble = UNSIGNED_LONG_MAX.doubleValue();
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(ulmAsDouble));
            assertEquals("[" + ulmAsDouble + "] out of [unsigned_long] range", e.getMessage());
            Double nd = -Math.abs(randomDouble());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(nd));
            assertEquals("[" + nd + "] out of [unsigned_long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = BigInteger.valueOf(randomNonNegativeLong());
            assertEquals(bi, conversion.convert(bi.longValue()));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bi.negate()));
            assertEquals("[" + bi.negate() + "] out of [unsigned_long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            long l = randomNonNegativeLong();
            assertEquals(BigInteger.valueOf(l), conversion.convert(asDateTime(l)));
            // pre-epoch datetimes would be negative and are rejected
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(-l)));
            assertEquals("[" + -l + "] out of [unsigned_long] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(BOOLEAN, to);
            assertNull(conversion.convert(null));
            assertEquals(BigInteger.ONE, conversion.convert(true));
            assertEquals(BigInteger.ZERO, conversion.convert(false));
        }
        {
            Converter conversion = converterFor(KEYWORD, to);
            assertNull(conversion.convert(null));
            BigInteger bi = randomBigInteger();
            assertEquals(bi, conversion.convert(bi.toString()));
            assertEquals(UNSIGNED_LONG_MAX, conversion.convert(UNSIGNED_LONG_MAX.toString()));
            // a fractional part in the string is simply truncated
            assertEquals(UNSIGNED_LONG_MAX, conversion.convert(UNSIGNED_LONG_MAX.toString() + ".0"));
            assertEquals(bi, conversion.convert(bi.toString() + "." + randomNonNegativeLong()));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(BigInteger.ONE.negate().toString()));
            assertEquals("[-1] out of [unsigned_long] range", e.getMessage());
            e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString()));
            assertEquals("[" + UNSIGNED_LONG_MAX.add(BigInteger.ONE).toString() + "] out of [unsigned_long] range", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code INTEGER}: doubles are rounded, values outside the
     * int range are rejected, and datetimes use their epoch-milli value (nanos
     * dropped), which must also fit in an int.
     */
    public void testConversionToInt() {
        DataType to = INTEGER;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals(10, conversion.convert(10.0));
            assertEquals(10, conversion.convert(10.1));
            assertEquals(11, conversion.convert(10.6));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Long.MAX_VALUE));
            assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = BigInteger.valueOf(randomIntBetween(0, Integer.MAX_VALUE));
            assertEquals(bi.intValueExact(), conversion.convert(bi));
            BigInteger bip = BigInteger.valueOf(randomLongBetween(Integer.MAX_VALUE, Long.MAX_VALUE));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip));
            assertEquals("[" + bip + "] out of [integer] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals(12345678, conversion.convert(asDateTime(12345678L)));
            assertEquals(223456789, conversion.convert(asDateTime(223456789L)));
            assertEquals(-123456789, conversion.convert(asDateTime(-123456789L)));
            // Nanos are ignored, only millis are used
            assertEquals(62123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:01:02.123456789Z")));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Long.MAX_VALUE)));
            assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code SHORT}: doubles are rounded, values outside the
     * short range are rejected, and datetimes use their epoch-milli value (nanos
     * dropped), which must also fit in a short.
     */
    public void testConversionToShort() {
        DataType to = SHORT;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals((short) 10, conversion.convert(10.0));
            assertEquals((short) 10, conversion.convert(10.1));
            assertEquals((short) 11, conversion.convert(10.6));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Integer.MAX_VALUE));
            assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = BigInteger.valueOf(randomIntBetween(0, Short.MAX_VALUE));
            assertEquals(bi.shortValueExact(), conversion.convert(bi));
            BigInteger bip = BigInteger.valueOf(randomLongBetween(Short.MAX_VALUE, Long.MAX_VALUE));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip));
            assertEquals("[" + bip + "] out of [short] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals((short) 12345, conversion.convert(asDateTime(12345L)));
            assertEquals((short) -12345, conversion.convert(asDateTime(-12345L)));
            // Nanos are ignored, only millis are used
            assertEquals((short) 1123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:00:01.123456789Z")));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE)));
            assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage());
        }
    }
    /**
     * Conversions targeting {@code BYTE}: doubles are rounded, values outside the
     * byte range are rejected, and datetimes use their epoch-milli value (nanos
     * dropped), which must also fit in a byte.
     */
    public void testConversionToByte() {
        DataType to = BYTE;
        {
            Converter conversion = converterFor(DOUBLE, to);
            assertNull(conversion.convert(null));
            assertEquals((byte) 10, conversion.convert(10.0));
            assertEquals((byte) 10, conversion.convert(10.1));
            assertEquals((byte) 11, conversion.convert(10.6));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(Short.MAX_VALUE));
            assertEquals("[" + Short.MAX_VALUE + "] out of [byte] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(UNSIGNED_LONG, to);
            assertNull(conversion.convert(null));
            BigInteger bi = BigInteger.valueOf(randomIntBetween(0, Byte.MAX_VALUE));
            assertEquals(bi.byteValueExact(), conversion.convert(bi));
            BigInteger bip = BigInteger.valueOf(randomLongBetween(Byte.MAX_VALUE, Long.MAX_VALUE));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(bip));
            assertEquals("[" + bip + "] out of [byte] range", e.getMessage());
        }
        {
            Converter conversion = converterFor(DATETIME, to);
            assertNull(conversion.convert(null));
            assertEquals((byte) 123, conversion.convert(asDateTime(123L)));
            assertEquals((byte) -123, conversion.convert(asDateTime(-123L)));
            // Nanos are ignored, only millis are used
            assertEquals((byte) 123, conversion.convert(DateUtils.asDateTime("1970-01-01T00:00:00.123456789Z")));
            Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert(asDateTime(Integer.MAX_VALUE)));
            assertEquals("[" + Integer.MAX_VALUE + "] out of [byte] range", e.getMessage());
        }
    }
public void testConversionToNull() {
Converter conversion = converterFor(DOUBLE, NULL);
assertNull(conversion.convert(null));
assertNull(conversion.convert(10.0));
}
public void testConversionFromNull() {
Converter conversion = converterFor(NULL, INTEGER);
assertNull(conversion.convert(null));
assertNull(conversion.convert(10));
}
public void testConversionToIdentity() {
Converter conversion = converterFor(INTEGER, INTEGER);
assertNull(conversion.convert(null));
assertEquals(10, conversion.convert(10));
}
    /**
     * {@code commonType} resolves the common supertype of two data types:
     * NULL yields the other type, string types combined with numerics yield the
     * numeric type, narrower numerics widen to the wider one, rationals win over
     * integrals (FLOAT over UNSIGNED_LONG), and TEXT wins over KEYWORD.
     */
    public void testCommonType() {
        assertEquals(BOOLEAN, commonType(BOOLEAN, NULL));
        assertEquals(BOOLEAN, commonType(NULL, BOOLEAN));
        assertEquals(BOOLEAN, commonType(BOOLEAN, BOOLEAN));
        assertEquals(NULL, commonType(NULL, NULL));
        assertEquals(INTEGER, commonType(INTEGER, KEYWORD));
        assertEquals(LONG, commonType(TEXT, LONG));
        assertEquals(SHORT, commonType(SHORT, BYTE));
        assertEquals(FLOAT, commonType(BYTE, FLOAT));
        assertEquals(FLOAT, commonType(FLOAT, INTEGER));
        assertEquals(UNSIGNED_LONG, commonType(UNSIGNED_LONG, LONG));
        assertEquals(DOUBLE, commonType(DOUBLE, FLOAT));
        assertEquals(FLOAT, commonType(FLOAT, UNSIGNED_LONG));
        // strings
        assertEquals(TEXT, commonType(TEXT, KEYWORD));
        assertEquals(TEXT, commonType(KEYWORD, TEXT));
    }
public void testEsDataTypes() {
for (DataType type : DataTypes.types()) {
assertEquals(type, DataTypes.fromTypeName(type.typeName()));
}
}
    /** Converting to {@code UNSUPPORTED} is rejected with a descriptive error message. */
    public void testConversionToUnsupported() {
        Exception e = expectThrows(InvalidArgumentException.class, () -> DataTypeConverter.convert(Integer.valueOf(1), UNSUPPORTED));
        assertEquals("cannot convert from [1], type [integer] to [unsupported]", e.getMessage());
    }
public void testStringToIp() {
Converter conversion = converterFor(KEYWORD, IP);
assertNull(conversion.convert(null));
assertEquals("192.168.1.1", conversion.convert("192.168.1.1"));
Exception e = expectThrows(InvalidArgumentException.class, () -> conversion.convert("10.1.1.300"));
assertEquals("[10.1.1.300] is not a valid IPv4 or IPv6 address", e.getMessage());
}
public void testIpToString() {
Source s = new Source(Location.EMPTY, "10.0.0.1");
Converter ipToString = converterFor(IP, KEYWORD);
assertEquals("10.0.0.1", ipToString.convert(new Literal(s, "10.0.0.1", IP)));
Converter stringToIp = converterFor(KEYWORD, IP);
assertEquals("10.0.0.1", ipToString.convert(stringToIp.convert(new Literal(s, "10.0.0.1", KEYWORD))));
}
public void testStringToVersion() {
Converter conversion = converterFor(randomFrom(TEXT, KEYWORD), VERSION);
assertNull(conversion.convert(null));
assertEquals(new Version("2.1.4").toString(), conversion.convert("2.1.4").toString());
assertEquals(new Version("2.1.4").toBytesRef(), ((Version) conversion.convert("2.1.4")).toBytesRef());
assertEquals(new Version("2.1.4-SNAPSHOT").toString(), conversion.convert("2.1.4-SNAPSHOT").toString());
assertEquals(new Version("2.1.4-SNAPSHOT").toBytesRef(), ((Version) conversion.convert("2.1.4-SNAPSHOT")).toBytesRef());
}
public void testVersionToString() {
Source s = new Source(Location.EMPTY, "2.1.4");
Source s2 = new Source(Location.EMPTY, "2.1.4-SNAPSHOT");
DataType stringType = randomFrom(TEXT, KEYWORD);
Converter versionToString = converterFor(VERSION, stringType);
assertEquals("2.1.4", versionToString.convert(new Literal(s, "2.1.4", VERSION)));
assertEquals("2.1.4-SNAPSHOT", versionToString.convert(new Literal(s2, "2.1.4-SNAPSHOT", VERSION)));
Converter stringToVersion = converterFor(stringType, VERSION);
assertEquals("2.1.4", versionToString.convert(stringToVersion.convert(new Literal(s, "2.1.4", stringType))));
assertEquals("2.1.4-SNAPSHOT", versionToString.convert(stringToVersion.convert(new Literal(s2, "2.1.4-SNAPSHOT", stringType))));
}
}
| DataTypeConversionTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.