language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OAuth2LoginConfigurer.java | {
"start": 8922,
"end": 31328
} | class ____<B extends HttpSecurityBuilder<B>>
extends AbstractAuthenticationFilterConfigurer<B, OAuth2LoginConfigurer<B>, OAuth2LoginAuthenticationFilter> {
private final AuthorizationEndpointConfig authorizationEndpointConfig = new AuthorizationEndpointConfig();
private final TokenEndpointConfig tokenEndpointConfig = new TokenEndpointConfig();
private final RedirectionEndpointConfig redirectionEndpointConfig = new RedirectionEndpointConfig();
private final UserInfoEndpointConfig userInfoEndpointConfig = new UserInfoEndpointConfig();
private String loginPage;
private String loginProcessingUrl = OAuth2LoginAuthenticationFilter.DEFAULT_FILTER_PROCESSES_URI;
private ClientRegistrationRepository clientRegistrationRepository;
private OAuth2AuthorizedClientRepository authorizedClientRepository;
private SecurityContextRepository securityContextRepository;
/**
* Sets the repository of client registrations.
* @param clientRegistrationRepository the repository of client registrations
* @return the {@link OAuth2LoginConfigurer} for further configuration
*/
public OAuth2LoginConfigurer<B> clientRegistrationRepository(
ClientRegistrationRepository clientRegistrationRepository) {
Assert.notNull(clientRegistrationRepository, "clientRegistrationRepository cannot be null");
this.getBuilder().setSharedObject(ClientRegistrationRepository.class, clientRegistrationRepository);
this.clientRegistrationRepository = clientRegistrationRepository;
return this;
}
/**
* Sets the repository for authorized client(s).
* @param authorizedClientRepository the authorized client repository
* @return the {@link OAuth2LoginConfigurer} for further configuration
* @since 5.1
*/
public OAuth2LoginConfigurer<B> authorizedClientRepository(
OAuth2AuthorizedClientRepository authorizedClientRepository) {
Assert.notNull(authorizedClientRepository, "authorizedClientRepository cannot be null");
this.getBuilder().setSharedObject(OAuth2AuthorizedClientRepository.class, authorizedClientRepository);
this.authorizedClientRepository = authorizedClientRepository;
return this;
}
/**
* Sets the service for authorized client(s).
* @param authorizedClientService the authorized client service
* @return the {@link OAuth2LoginConfigurer} for further configuration
*/
public OAuth2LoginConfigurer<B> authorizedClientService(OAuth2AuthorizedClientService authorizedClientService) {
Assert.notNull(authorizedClientService, "authorizedClientService cannot be null");
this.authorizedClientRepository(
new AuthenticatedPrincipalOAuth2AuthorizedClientRepository(authorizedClientService));
return this;
}
@Override
public OAuth2LoginConfigurer<B> loginPage(String loginPage) {
Assert.hasText(loginPage, "loginPage cannot be empty");
this.loginPage = loginPage;
return this;
}
@Override
public OAuth2LoginConfigurer<B> loginProcessingUrl(String loginProcessingUrl) {
Assert.hasText(loginProcessingUrl, "loginProcessingUrl cannot be empty");
this.loginProcessingUrl = loginProcessingUrl;
return this;
}
/**
* Sets the {@link SecurityContextRepository} to use.
* @param securityContextRepository the {@link SecurityContextRepository} to use
* @return the {@link OAuth2LoginConfigurer} for further configuration
*/
@Override
public OAuth2LoginConfigurer<B> securityContextRepository(SecurityContextRepository securityContextRepository) {
this.securityContextRepository = securityContextRepository;
return this;
}
/**
* Sets the registry for managing the OIDC client-provider session link
* @param oidcSessionRegistry the {@link OidcSessionRegistry} to use
* @return the {@link OAuth2LoginConfigurer} for further configuration
* @since 6.2
*/
public OAuth2LoginConfigurer<B> oidcSessionRegistry(OidcSessionRegistry oidcSessionRegistry) {
Assert.notNull(oidcSessionRegistry, "oidcSessionRegistry cannot be null");
getBuilder().setSharedObject(OidcSessionRegistry.class, oidcSessionRegistry);
return this;
}
/**
* Configures the Authorization Server's Authorization Endpoint.
* @param authorizationEndpointCustomizer the {@link Customizer} to provide more
* options for the {@link AuthorizationEndpointConfig}
* @return the {@link OAuth2LoginConfigurer} for further customizations
*/
public OAuth2LoginConfigurer<B> authorizationEndpoint(
Customizer<AuthorizationEndpointConfig> authorizationEndpointCustomizer) {
authorizationEndpointCustomizer.customize(this.authorizationEndpointConfig);
return this;
}
/**
* Configures the Authorization Server's Token Endpoint.
* @param tokenEndpointCustomizer the {@link Customizer} to provide more options for
* the {@link TokenEndpointConfig}
* @return the {@link OAuth2LoginConfigurer} for further customizations
* @throws Exception
*/
public OAuth2LoginConfigurer<B> tokenEndpoint(Customizer<TokenEndpointConfig> tokenEndpointCustomizer) {
tokenEndpointCustomizer.customize(this.tokenEndpointConfig);
return this;
}
/**
* Configures the Client's Redirection Endpoint.
* @param redirectionEndpointCustomizer the {@link Customizer} to provide more options
* for the {@link RedirectionEndpointConfig}
* @return the {@link OAuth2LoginConfigurer} for further customizations
*/
public OAuth2LoginConfigurer<B> redirectionEndpoint(
Customizer<RedirectionEndpointConfig> redirectionEndpointCustomizer) {
redirectionEndpointCustomizer.customize(this.redirectionEndpointConfig);
return this;
}
/**
* Configures the Authorization Server's UserInfo Endpoint.
* @param userInfoEndpointCustomizer the {@link Customizer} to provide more options
* for the {@link UserInfoEndpointConfig}
* @return the {@link OAuth2LoginConfigurer} for further customizations
*/
public OAuth2LoginConfigurer<B> userInfoEndpoint(Customizer<UserInfoEndpointConfig> userInfoEndpointCustomizer) {
userInfoEndpointCustomizer.customize(this.userInfoEndpointConfig);
return this;
}
@Override
public void init(B http) {
OAuth2LoginAuthenticationFilter authenticationFilter = new OAuth2LoginAuthenticationFilter(
this.getClientRegistrationRepository(), this.getAuthorizedClientRepository(), this.loginProcessingUrl);
RequestMatcher processUri = getRequestMatcherBuilder().matcher(this.loginProcessingUrl);
authenticationFilter.setRequiresAuthenticationRequestMatcher(processUri);
authenticationFilter.setSecurityContextHolderStrategy(getSecurityContextHolderStrategy());
if (this.securityContextRepository != null) {
authenticationFilter.setSecurityContextRepository(this.securityContextRepository);
}
this.setAuthenticationFilter(authenticationFilter);
super.loginProcessingUrl(this.loginProcessingUrl);
if (this.loginPage != null) {
// Set custom login page
super.loginPage(this.loginPage);
super.init(http);
}
else {
Map<String, String> loginUrlToClientName = this.getLoginLinks();
if (loginUrlToClientName.size() == 1) {
// Setup auto-redirect to provider login page
// when only 1 client is configured
this.updateAuthenticationDefaults();
this.updateAccessDefaults(http);
String providerLoginPage = loginUrlToClientName.keySet().iterator().next();
this.registerAuthenticationEntryPoint(http, this.getLoginEntryPoint(http, providerLoginPage));
}
else {
super.init(http);
}
}
OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient = getAccessTokenResponseClient();
OAuth2UserService<OAuth2UserRequest, OAuth2User> oauth2UserService = getOAuth2UserService();
OAuth2LoginAuthenticationProvider oauth2LoginAuthenticationProvider = new OAuth2LoginAuthenticationProvider(
accessTokenResponseClient, oauth2UserService);
GrantedAuthoritiesMapper userAuthoritiesMapper = this.getGrantedAuthoritiesMapper();
if (userAuthoritiesMapper != null) {
oauth2LoginAuthenticationProvider.setAuthoritiesMapper(userAuthoritiesMapper);
}
http.authenticationProvider(this.postProcess(oauth2LoginAuthenticationProvider));
boolean oidcAuthenticationProviderEnabled = ClassUtils
.isPresent("org.springframework.security.oauth2.jwt.JwtDecoder", this.getClass().getClassLoader());
if (oidcAuthenticationProviderEnabled) {
OAuth2UserService<OidcUserRequest, OidcUser> oidcUserService = getOidcUserService();
OidcAuthorizationCodeAuthenticationProvider oidcAuthorizationCodeAuthenticationProvider = new OidcAuthorizationCodeAuthenticationProvider(
accessTokenResponseClient, oidcUserService);
OidcAuthorizedClientRefreshedEventListener oidcAuthorizedClientRefreshedEventListener = new OidcAuthorizedClientRefreshedEventListener();
oidcAuthorizedClientRefreshedEventListener.setUserService(oidcUserService);
oidcAuthorizedClientRefreshedEventListener
.setApplicationEventPublisher(http.getSharedObject(ApplicationContext.class));
JwtDecoderFactory<ClientRegistration> jwtDecoderFactory = this.getJwtDecoderFactoryBean();
if (jwtDecoderFactory != null) {
oidcAuthorizationCodeAuthenticationProvider.setJwtDecoderFactory(jwtDecoderFactory);
oidcAuthorizedClientRefreshedEventListener.setJwtDecoderFactory(jwtDecoderFactory);
}
if (userAuthoritiesMapper != null) {
oidcAuthorizationCodeAuthenticationProvider.setAuthoritiesMapper(userAuthoritiesMapper);
oidcAuthorizedClientRefreshedEventListener.setAuthoritiesMapper(userAuthoritiesMapper);
}
http.authenticationProvider(this.postProcess(oidcAuthorizationCodeAuthenticationProvider));
registerDelegateApplicationListener(this.postProcess(oidcAuthorizedClientRefreshedEventListener));
configureOidcUserRefreshedEventListener(http);
}
else {
http.authenticationProvider(new OidcAuthenticationRequestChecker());
}
this.initDefaultLoginFilter(http);
}
@Override
public void configure(B http) {
OAuth2AuthorizationRequestRedirectFilter authorizationRequestFilter = new OAuth2AuthorizationRequestRedirectFilter(
getAuthorizationRequestResolver());
if (this.authorizationEndpointConfig.authorizationRequestRepository != null) {
authorizationRequestFilter
.setAuthorizationRequestRepository(this.authorizationEndpointConfig.authorizationRequestRepository);
}
if (this.authorizationEndpointConfig.authorizationRedirectStrategy != null) {
authorizationRequestFilter
.setAuthorizationRedirectStrategy(this.authorizationEndpointConfig.authorizationRedirectStrategy);
}
RequestCache requestCache = http.getSharedObject(RequestCache.class);
if (requestCache != null) {
authorizationRequestFilter.setRequestCache(requestCache);
}
http.addFilter(this.postProcess(authorizationRequestFilter));
OAuth2LoginAuthenticationFilter authenticationFilter = this.getAuthenticationFilter();
if (this.redirectionEndpointConfig.authorizationResponseBaseUri != null) {
authenticationFilter.setRequiresAuthenticationRequestMatcher(
getRequestMatcherBuilder().matcher(this.redirectionEndpointConfig.authorizationResponseBaseUri));
}
if (this.authorizationEndpointConfig.authorizationRequestRepository != null) {
authenticationFilter
.setAuthorizationRequestRepository(this.authorizationEndpointConfig.authorizationRequestRepository);
}
configureOidcSessionRegistry(http);
super.configure(http);
}
@Override
protected RequestMatcher createLoginProcessingUrlMatcher(String loginProcessingUrl) {
return getRequestMatcherBuilder().matcher(loginProcessingUrl);
}
private OAuth2AuthorizationRequestResolver getAuthorizationRequestResolver() {
if (this.authorizationEndpointConfig.authorizationRequestResolver != null) {
return this.authorizationEndpointConfig.authorizationRequestResolver;
}
ClientRegistrationRepository clientRegistrationRepository = this.getClientRegistrationRepository();
ResolvableType resolvableType = ResolvableType.forClass(OAuth2AuthorizationRequestResolver.class);
OAuth2AuthorizationRequestResolver bean = getBeanOrNull(resolvableType);
if (bean != null) {
return bean;
}
String authorizationRequestBaseUri = this.authorizationEndpointConfig.authorizationRequestBaseUri;
if (authorizationRequestBaseUri == null) {
authorizationRequestBaseUri = OAuth2AuthorizationRequestRedirectFilter.DEFAULT_AUTHORIZATION_REQUEST_BASE_URI;
}
return new DefaultOAuth2AuthorizationRequestResolver(clientRegistrationRepository, authorizationRequestBaseUri);
}
private ClientRegistrationRepository getClientRegistrationRepository() {
return (this.clientRegistrationRepository != null) ? this.clientRegistrationRepository
: OAuth2ClientConfigurerUtils.getClientRegistrationRepository(this.getBuilder());
}
private OAuth2AuthorizedClientRepository getAuthorizedClientRepository() {
return (this.authorizedClientRepository != null) ? this.authorizedClientRepository
: OAuth2ClientConfigurerUtils.getAuthorizedClientRepository(this.getBuilder());
}
@SuppressWarnings("unchecked")
private JwtDecoderFactory<ClientRegistration> getJwtDecoderFactoryBean() {
ResolvableType type = ResolvableType.forClassWithGenerics(JwtDecoderFactory.class, ClientRegistration.class);
String[] names = this.getBuilder().getSharedObject(ApplicationContext.class).getBeanNamesForType(type);
if (names.length > 1) {
throw new NoUniqueBeanDefinitionException(type, names);
}
return (JwtDecoderFactory<ClientRegistration>) this.getBuilder()
.getSharedObject(ApplicationContext.class)
.getBeanProvider(type)
.getIfUnique();
}
private GrantedAuthoritiesMapper getGrantedAuthoritiesMapper() {
GrantedAuthoritiesMapper grantedAuthoritiesMapper = this.getBuilder()
.getSharedObject(GrantedAuthoritiesMapper.class);
if (grantedAuthoritiesMapper == null) {
grantedAuthoritiesMapper = this.getGrantedAuthoritiesMapperBean();
if (grantedAuthoritiesMapper != null) {
this.getBuilder().setSharedObject(GrantedAuthoritiesMapper.class, grantedAuthoritiesMapper);
}
}
return grantedAuthoritiesMapper;
}
private GrantedAuthoritiesMapper getGrantedAuthoritiesMapperBean() {
Map<String, GrantedAuthoritiesMapper> grantedAuthoritiesMapperMap = BeanFactoryUtils
.beansOfTypeIncludingAncestors(this.getBuilder().getSharedObject(ApplicationContext.class),
GrantedAuthoritiesMapper.class);
return (!grantedAuthoritiesMapperMap.isEmpty() ? grantedAuthoritiesMapperMap.values().iterator().next() : null);
}
private OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> getAccessTokenResponseClient() {
if (this.tokenEndpointConfig.accessTokenResponseClient != null) {
return this.tokenEndpointConfig.accessTokenResponseClient;
}
ResolvableType resolvableType = ResolvableType.forClassWithGenerics(OAuth2AccessTokenResponseClient.class,
OAuth2AuthorizationCodeGrantRequest.class);
OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> bean = getBeanOrNull(resolvableType);
return (bean != null) ? bean : new RestClientAuthorizationCodeTokenResponseClient();
}
private OAuth2UserService<OidcUserRequest, OidcUser> getOidcUserService() {
if (this.userInfoEndpointConfig.oidcUserService != null) {
return this.userInfoEndpointConfig.oidcUserService;
}
ResolvableType type = ResolvableType.forClassWithGenerics(OAuth2UserService.class, OidcUserRequest.class,
OidcUser.class);
OAuth2UserService<OidcUserRequest, OidcUser> bean = getBeanOrNull(type);
return (bean != null) ? bean : new OidcUserService();
}
private OAuth2UserService<OAuth2UserRequest, OAuth2User> getOAuth2UserService() {
if (this.userInfoEndpointConfig.userService != null) {
return this.userInfoEndpointConfig.userService;
}
ResolvableType type = ResolvableType.forClassWithGenerics(OAuth2UserService.class, OAuth2UserRequest.class,
OAuth2User.class);
OAuth2UserService<OAuth2UserRequest, OAuth2User> bean = getBeanOrNull(type);
return (bean != null) ? bean : new DefaultOAuth2UserService();
}
@SuppressWarnings("unchecked")
private <T> T getBeanOrNull(ResolvableType type) {
ApplicationContext context = getBuilder().getSharedObject(ApplicationContext.class);
if (context == null) {
return null;
}
return (T) context.getBeanProvider(type).getIfUnique();
}
private void initDefaultLoginFilter(B http) {
DefaultLoginPageGeneratingFilter loginPageGeneratingFilter = http
.getSharedObject(DefaultLoginPageGeneratingFilter.class);
if (loginPageGeneratingFilter == null || this.isCustomLoginPage()) {
return;
}
loginPageGeneratingFilter.setOauth2LoginEnabled(true);
loginPageGeneratingFilter.setOauth2AuthenticationUrlToClientName(this.getLoginLinks());
loginPageGeneratingFilter.setLoginPageUrl(this.getLoginPage());
loginPageGeneratingFilter.setFailureUrl(this.getFailureUrl());
}
@SuppressWarnings("unchecked")
private Map<String, String> getLoginLinks() {
Iterable<ClientRegistration> clientRegistrations = null;
ClientRegistrationRepository clientRegistrationRepository = this.getClientRegistrationRepository();
ResolvableType type = ResolvableType.forInstance(clientRegistrationRepository).as(Iterable.class);
if (type != ResolvableType.NONE && ClientRegistration.class.isAssignableFrom(type.resolveGenerics()[0])) {
clientRegistrations = (Iterable<ClientRegistration>) clientRegistrationRepository;
}
if (clientRegistrations == null) {
return Collections.emptyMap();
}
String authorizationRequestBaseUri = (this.authorizationEndpointConfig.authorizationRequestBaseUri != null)
? this.authorizationEndpointConfig.authorizationRequestBaseUri
: OAuth2AuthorizationRequestRedirectFilter.DEFAULT_AUTHORIZATION_REQUEST_BASE_URI;
Map<String, String> loginUrlToClientName = new HashMap<>();
clientRegistrations.forEach((registration) -> {
if (AuthorizationGrantType.AUTHORIZATION_CODE.equals(registration.getAuthorizationGrantType())) {
String authorizationRequestUri = authorizationRequestBaseUri + "/" + registration.getRegistrationId();
loginUrlToClientName.put(authorizationRequestUri, registration.getClientName());
}
});
return loginUrlToClientName;
}
private AuthenticationEntryPoint getLoginEntryPoint(B http, String providerLoginPage) {
RequestMatcher loginPageMatcher = getRequestMatcherBuilder().matcher(this.getLoginPage());
RequestMatcher faviconMatcher = getRequestMatcherBuilder().matcher("/favicon.ico");
RequestMatcher defaultEntryPointMatcher = this.getAuthenticationEntryPointMatcher(http);
RequestMatcher defaultLoginPageMatcher = new AndRequestMatcher(
new OrRequestMatcher(loginPageMatcher, faviconMatcher), defaultEntryPointMatcher);
RequestMatcher notXRequestedWith = new NegatedRequestMatcher(
new RequestHeaderRequestMatcher("X-Requested-With", "XMLHttpRequest"));
RequestMatcher formLoginNotEnabled = getFormLoginNotEnabledRequestMatcher(http);
LoginUrlAuthenticationEntryPoint loginUrlEntryPoint = new LoginUrlAuthenticationEntryPoint(providerLoginPage);
RequestMatcher loginUrlMatcher = new AndRequestMatcher(notXRequestedWith,
new NegatedRequestMatcher(defaultLoginPageMatcher), formLoginNotEnabled);
// @formatter:off
AuthenticationEntryPoint loginEntryPoint = DelegatingAuthenticationEntryPoint.builder()
.addEntryPointFor(loginUrlEntryPoint, loginUrlMatcher)
.defaultEntryPoint(getAuthenticationEntryPoint())
.build();
// @formatter:on
ExceptionHandlingConfigurer<B> exceptions = http.getConfigurer(ExceptionHandlingConfigurer.class);
if (exceptions != null) {
RequestMatcher requestMatcher = getAuthenticationEntryPointMatcher(http);
exceptions.defaultDeniedHandlerForMissingAuthority(
(ep) -> ep.addEntryPointFor(loginEntryPoint, requestMatcher),
FactorGrantedAuthority.AUTHORIZATION_CODE_AUTHORITY);
}
return loginEntryPoint;
}
private RequestMatcher getFormLoginNotEnabledRequestMatcher(B http) {
DefaultLoginPageGeneratingFilter defaultLoginPageGeneratingFilter = http
.getSharedObject(DefaultLoginPageGeneratingFilter.class);
Field formLoginEnabledField = (defaultLoginPageGeneratingFilter != null)
? ReflectionUtils.findField(DefaultLoginPageGeneratingFilter.class, "formLoginEnabled") : null;
if (formLoginEnabledField != null) {
ReflectionUtils.makeAccessible(formLoginEnabledField);
return (request) -> Boolean.FALSE
.equals(ReflectionUtils.getField(formLoginEnabledField, defaultLoginPageGeneratingFilter));
}
return AnyRequestMatcher.INSTANCE;
}
private void configureOidcSessionRegistry(B http) {
if (http.getConfigurer(OidcLogoutConfigurer.class) == null
&& http.getSharedObject(OidcSessionRegistry.class) == null) {
return;
}
OidcSessionRegistry sessionRegistry = OAuth2ClientConfigurerUtils.getOidcSessionRegistry(http);
SessionManagementConfigurer<B> sessionConfigurer = http.getConfigurer(SessionManagementConfigurer.class);
if (sessionConfigurer != null) {
OidcSessionRegistryAuthenticationStrategy sessionAuthenticationStrategy = new OidcSessionRegistryAuthenticationStrategy();
sessionAuthenticationStrategy.setSessionRegistry(sessionRegistry);
sessionConfigurer.addSessionAuthenticationStrategy(sessionAuthenticationStrategy);
}
OidcClientSessionEventListener listener = new OidcClientSessionEventListener();
listener.setSessionRegistry(sessionRegistry);
registerDelegateApplicationListener(listener);
}
private void configureOidcUserRefreshedEventListener(B http) {
OidcUserRefreshedEventListener oidcUserRefreshedEventListener = new OidcUserRefreshedEventListener();
oidcUserRefreshedEventListener.setSecurityContextHolderStrategy(this.getSecurityContextHolderStrategy());
SecurityContextRepository securityContextRepository = http.getSharedObject(SecurityContextRepository.class);
if (securityContextRepository != null) {
oidcUserRefreshedEventListener.setSecurityContextRepository(securityContextRepository);
}
registerDelegateApplicationListener(oidcUserRefreshedEventListener);
}
private void registerDelegateApplicationListener(ApplicationListener<?> delegate) {
DelegatingApplicationListener delegating = getBeanOrNull(
ResolvableType.forType(DelegatingApplicationListener.class));
if (delegating == null) {
return;
}
SmartApplicationListener smartListener = new GenericApplicationListenerAdapter(delegate);
delegating.addListener(smartListener);
}
/**
* Configuration options for the Authorization Server's Authorization Endpoint.
*/
public final | OAuth2LoginConfigurer |
java | apache__camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/UnsharableCodecsConflictsTest.java | {
"start": 1427,
"end": 4257
} | class ____ extends BaseNettyTest {
private static final byte[] LENGTH_HEADER = { 0x00, 0x00, 0x40, 0x00 }; // 4096 bytes
private static final Logger LOG = LoggerFactory.getLogger(UnsharableCodecsConflictsTest.class);
@RegisterExtension
protected AvailablePortFinder.Port port2 = AvailablePortFinder.find();
private Processor processor = new P();
@BindToRegistry("length-decoder")
private ChannelHandlerFactory decoder = ChannelHandlerFactories.newLengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4);
@BindToRegistry("length-decoder2")
private ChannelHandlerFactory decoder2 = ChannelHandlerFactories.newLengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4);
@Test
public void canSupplyMultipleCodecsToEndpointPipeline() throws Exception {
byte[] sPort1 = new byte[8192];
byte[] sPort2 = new byte[16383];
Arrays.fill(sPort1, (byte) 0x38);
Arrays.fill(sPort2, (byte) 0x39);
byte[] bodyPort1 = (new String(LENGTH_HEADER) + new String(sPort1)).getBytes();
byte[] bodyPort2 = (new String(LENGTH_HEADER) + new String(sPort2)).getBytes();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived(new String(sPort2) + "9");
Socket server1 = getSocket("localhost", port.getPort());
Socket server2 = getSocket("localhost", port2.getPort());
try {
sendSopBuffer(bodyPort2, server2);
sendSopBuffer(bodyPort1, server1);
sendSopBuffer(new String("9").getBytes(), server2);
} catch (Exception e) {
LOG.error("{}", e.getMessage(), e);
} finally {
server1.close();
server2.close();
}
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("netty:tcp://localhost:" + port.getPort() + "?decoders=#length-decoder&sync=false").process(processor);
from("netty:tcp://localhost:" + port2.getPort() + "?decoders=#length-decoder2&sync=false").process(processor)
.to("mock:result");
}
};
}
private static Socket getSocket(String host, int port) throws IOException {
Socket s = new Socket(host, port);
s.setSoTimeout(60000);
return s;
}
public static void sendSopBuffer(byte[] buf, Socket server) throws Exception {
BufferedOutputStream dataOut = IOHelper.buffered(server.getOutputStream());
try {
dataOut.write(buf, 0, buf.length);
dataOut.flush();
} catch (Exception e) {
IOHelper.close(dataOut);
server.close();
throw e;
}
}
| UnsharableCodecsConflictsTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java | {
"start": 7602,
"end": 31713
} | class ____ extends ServiceComparator {
private final boolean avoidStaleDataNodesForRead;
private final long staleInterval;
private final boolean avoidSlowDataNodesForRead;
private final Set<String> slowNodesUuidSet;
/**
* Constructor of ServiceAndStaleComparator
* @param avoidStaleDataNodesForRead
* Whether or not to avoid using stale DataNodes for reading.
* @param interval
* The time interval for marking datanodes as stale is passed from
* outside, since the interval may be changed dynamically.
* @param avoidSlowDataNodesForRead
* Whether or not to avoid using slow DataNodes for reading.
* @param slowNodesUuidSet
* Slow DataNodes UUID set.
*/
public StaleAndSlowComparator(
boolean avoidStaleDataNodesForRead, long interval,
boolean avoidSlowDataNodesForRead, Set<String> slowNodesUuidSet) {
this.avoidStaleDataNodesForRead = avoidStaleDataNodesForRead;
this.staleInterval = interval;
this.avoidSlowDataNodesForRead = avoidSlowDataNodesForRead;
this.slowNodesUuidSet = slowNodesUuidSet;
}
@Override
public int compare(DatanodeInfo a, DatanodeInfo b) {
int ret = super.compare(a, b);
if (ret != 0) {
return ret;
}
// Stale nodes will be moved behind the normal nodes
if (avoidStaleDataNodesForRead) {
boolean aStale = a.isStale(staleInterval);
boolean bStale = b.isStale(staleInterval);
ret = aStale == bStale ? 0 : (aStale ? 1 : -1);
if (ret != 0) {
return ret;
}
}
// Slow nodes will be moved behind the normal nodes
if (avoidSlowDataNodesForRead) {
boolean aSlow = slowNodesUuidSet.contains(a.getDatanodeUuid());
boolean bSlow = slowNodesUuidSet.contains(b.getDatanodeUuid());
ret = aSlow == bSlow ? 0 : (aSlow ? 1 : -1);
}
return ret;
}
}
/**
* Address matcher for matching an address to local address
*/
static final AddressMatcher LOCAL_ADDRESS_MATCHER = new AddressMatcher() {
@Override
public boolean match(InetSocketAddress s) {
return NetUtils.isLocalAddress(s.getAddress());
};
};
/**
* Whether the pathname is valid. Currently prohibits relative paths,
* names which contain a ":" or "//", or other non-canonical paths.
*/
public static boolean isValidName(String src) {
return DFSUtilClient.isValidName(src);
}
/**
* Checks if a string is a valid path component. For instance, components
* cannot contain a ":" or "/", and cannot be equal to a reserved component
* like ".snapshot".
* <p>
* The primary use of this method is for validating paths when loading the
* FSImage. During normal NN operation, paths are sometimes allowed to
* contain reserved components.
*
* @return If component is valid
*/
public static boolean isValidNameForComponent(String component) {
if (component.equals(".") ||
component.equals("..") ||
component.indexOf(":") >= 0 ||
component.indexOf("/") >= 0) {
return false;
}
return !isReservedPathComponent(component);
}
/**
* Returns if the component is reserved.
*
* <p>
* Note that some components are only reserved under certain directories, e.g.
* "/.reserved" is reserved, while "/hadoop/.reserved" is not.
* @return true, if the component is reserved
*/
public static boolean isReservedPathComponent(String component) {
for (String reserved : HdfsServerConstants.RESERVED_PATH_COMPONENTS) {
if (component.equals(reserved)) {
return true;
}
}
return false;
}
/**
* Converts a byte array to a string using UTF8 encoding.
*/
public static String bytes2String(byte[] bytes) {
return bytes2String(bytes, 0, bytes.length);
}
/**
* Decode a specific range of bytes of the given byte array to a string
* using UTF8.
*
* @param bytes The bytes to be decoded into characters
* @param offset The index of the first byte to decode
* @param length The number of bytes to decode
* @return The decoded string
*/
public static String bytes2String(byte[] bytes, int offset, int length) {
return DFSUtilClient.bytes2String(bytes, 0, bytes.length);
}
/**
* Converts a string to a byte array using UTF8 encoding.
*/
public static byte[] string2Bytes(String str) {
return DFSUtilClient.string2Bytes(str);
}
/**
* Given a list of path components returns a path as a UTF8 String
*/
public static String byteArray2PathString(final byte[][] components,
final int offset, final int length) {
// specifically not using StringBuilder to more efficiently build
// string w/o excessive byte[] copies and charset conversions.
final int range = offset + length;
if (offset < 0 || range < offset || range > components.length) {
throw new IndexOutOfBoundsException(
"Incorrect index [offset, range, size] ["
+ offset + ", " + range + ", " + components.length + "]");
}
if (length == 0) {
return "";
}
// absolute paths start with either null or empty byte[]
byte[] firstComponent = components[offset];
boolean isAbsolute = (offset == 0 &&
(firstComponent == null || firstComponent.length == 0));
if (offset == 0 && length == 1) {
return isAbsolute ? Path.SEPARATOR : bytes2String(firstComponent);
}
// compute length of full byte[], seed with 1st component and delimiters
int pos = isAbsolute ? 0 : firstComponent.length;
int size = pos + length - 1;
for (int i=offset + 1; i < range; i++) {
size += components[i].length;
}
final byte[] result = new byte[size];
if (!isAbsolute) {
System.arraycopy(firstComponent, 0, result, 0, firstComponent.length);
}
// append remaining components as "/component".
for (int i=offset + 1; i < range; i++) {
result[pos++] = (byte)Path.SEPARATOR_CHAR;
int len = components[i].length;
System.arraycopy(components[i], 0, result, pos, len);
pos += len;
}
return bytes2String(result);
}
public static String byteArray2PathString(byte[][] pathComponents) {
return byteArray2PathString(pathComponents, 0, pathComponents.length);
}
/**
* Converts a list of path components into a path using Path.SEPARATOR.
*
* @param components Path components
* @return Combined path as a UTF-8 string
*/
public static String strings2PathString(String[] components) {
if (components.length == 0) {
return "";
}
if (components.length == 1) {
if (components[0] == null || components[0].isEmpty()) {
return Path.SEPARATOR;
}
}
return Joiner.on(Path.SEPARATOR).join(components);
}
/** Convert an object representing a path to a string. */
public static String path2String(final Object path) {
return path == null? null
: path instanceof String? (String)path
: path instanceof byte[][]? byteArray2PathString((byte[][])path)
: path.toString();
}
/**
* Convert a UTF8 string to an array of byte arrays.
*/
public static byte[][] getPathComponents(String path) {
// avoid intermediate split to String[]
final byte[] bytes = string2Bytes(path);
return DFSUtilClient
.bytes2byteArray(bytes, bytes.length, (byte) Path.SEPARATOR_CHAR);
}
/**
* Splits the array of bytes into array of arrays of bytes
* on byte separator
* @param bytes the array of bytes to split
* @param separator the delimiting byte
*/
public static byte[][] bytes2byteArray(byte[] bytes, byte separator) {
return bytes2byteArray(bytes, bytes.length, separator);
}
/**
* Splits first len bytes in bytes to array of arrays of bytes
* on byte separator
* @param bytes the byte array to split
* @param len the number of bytes to split
* @param separator the delimiting byte
*/
public static byte[][] bytes2byteArray(byte[] bytes, int len,
byte separator) {
return DFSUtilClient.bytes2byteArray(bytes, len, separator);
}
/**
 * Return configuration key of format key.suffix1.suffix2...suffixN
 *
 * @param key base configuration key
 * @param suffixes suffixes to append, in order
 * @return the suffixed configuration key
 */
public static String addKeySuffixes(String key, String... suffixes) {
    return DFSUtilClient.addSuffix(key, DFSUtilClient.concatSuffixes(suffixes));
}
/**
 * Get all of the RPC addresses of the individual NNs in a given nameservice,
 * looked up under the {@code dfs.namenode.rpc-address} key family.
 *
 * @param conf Configuration
 * @param nsId the nameservice whose NNs addresses we want.
 * @param defaultValue default address to return in case key is not found.
 * @return A map from nnId {@literal ->} RPC address of each NN in the
 *         nameservice.
 */
public static Map<String, InetSocketAddress> getRpcAddressesForNameserviceId(
    Configuration conf, String nsId, String defaultValue) {
    return DFSUtilClient.getAddressesForNameserviceId(conf, nsId, defaultValue,
        DFS_NAMENODE_RPC_ADDRESS_KEY);
}
/**
 * @param conf configuration to read nameservice/namenode IDs and
 *        principal settings from
 * @return a collection of all configured NN Kerberos principals.
 * @throws IOException if a server principal cannot be resolved
 */
public static Set<String> getAllNnPrincipals(Configuration conf) throws IOException {
    Set<String> principals = new HashSet<String>();
    for (String nsId : DFSUtilClient.getNameServiceIds(conf)) {
      if (HAUtil.isHAEnabled(conf, nsId)) {
        // HA: one principal per configured NameNode in the nameservice.
        for (String nnId : DFSUtilClient.getNameNodeIds(conf, nsId)) {
          principals.add(getNnPrincipal(conf, nsId, nnId));
        }
      } else {
        // Non-HA: a single NameNode for this nameservice.
        principals.add(getNnPrincipal(conf, nsId, null));
      }
    }
    return principals;
}

/**
 * Resolves the Kerberos server principal for one NameNode by specializing
 * a copy of the configuration for the given nameservice/namenode IDs.
 *
 * @param conf base configuration (not modified; a copy is specialized)
 * @param nsId nameservice ID
 * @param nnId namenode ID, or null for a non-HA nameservice
 * @return the resolved server principal
 * @throws IOException if the principal cannot be resolved
 */
private static String getNnPrincipal(Configuration conf, String nsId,
    String nnId) throws IOException {
    Configuration confForNn = new Configuration(conf);
    NameNode.initializeGenericKeys(confForNn, nsId, nnId);
    return SecurityUtil.getServerPrincipal(
        confForNn.get(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY),
        DFSUtilClient.getNNAddress(confForNn).getHostName());
}
/**
 * Returns the set of Journalnode host names from the configuration.
 * The shared-edits URI is looked up first as a plain key, then per
 * nameservice, then per nameservice.namenode suffix. A URI that is not a
 * qjournal:// URI stops the lookup and the (possibly partial) set collected
 * so far is returned, matching the historical behavior.
 *
 * @param conf configuration
 * @return set of journalnode host names
 * @throws URISyntaxException if the configured URI is malformed
 * @throws IOException if journal node hostnames cannot be resolved
 */
public static Set<String> getJournalNodeAddresses(
    Configuration conf) throws URISyntaxException, IOException {
    Set<String> journalNodeList = new HashSet<>();
    String journalsUri = "";
    try {
      journalsUri = conf.get(DFS_NAMENODE_SHARED_EDITS_DIR_KEY);
      if (journalsUri != null) {
        // Key configured directly (no nameservice suffix).
        if (!journalsUri.startsWith("qjournal://")) {
          return journalNodeList;
        }
        addJournalNodeHostNames(journalsUri, conf, journalNodeList);
        return journalNodeList;
      }
      for (String nsId : DFSUtilClient.getNameServiceIds(conf)) {
        journalsUri = DFSUtilClient.getConfValue(
            null, nsId, conf, DFS_NAMENODE_SHARED_EDITS_DIR_KEY);
        if (journalsUri != null) {
          // Nameservice-suffixed key found.
          if (!journalsUri.startsWith("qjournal://")) {
            return journalNodeList;
          }
          addJournalNodeHostNames(journalsUri, conf, journalNodeList);
          continue;
        }
        // Fall back to nameservice.namenode-suffixed keys.
        for (String nnId : DFSUtilClient.getNameNodeIds(conf, nsId)) {
          String suffix = DFSUtilClient.concatSuffixes(nsId, nnId);
          journalsUri = DFSUtilClient.getConfValue(
              null, suffix, conf, DFS_NAMENODE_SHARED_EDITS_DIR_KEY);
          if (journalsUri == null ||
              !journalsUri.startsWith("qjournal://")) {
            return journalNodeList;
          }
          LOG.warn(DFS_NAMENODE_SHARED_EDITS_DIR_KEY + " is to be " +
              "configured as nameservice" +
              " specific key(append it with nameserviceId), no need" +
              " to append it with namenodeId");
          addJournalNodeHostNames(journalsUri, conf, journalNodeList);
        }
      }
    } catch (UnknownHostException e) {
      LOG.error("The conf property " + DFS_NAMENODE_SHARED_EDITS_DIR_KEY
          + " is not properly set with correct journal node hostnames");
      throw new UnknownHostException(journalsUri);
    } catch (URISyntaxException e) {
      LOG.error("The conf property " + DFS_NAMENODE_SHARED_EDITS_DIR_KEY
          + " is not set properly with correct journal node uri");
      throw new URISyntaxException(journalsUri, "The conf property " +
          DFS_NAMENODE_SHARED_EDITS_DIR_KEY + " is not" +
          " properly set with correct journal node uri");
    }
    return journalNodeList;
}

/**
 * Parses a qjournal URI and adds the host name of every resolved journal
 * node address to the given set.
 *
 * @param journalsUri the qjournal:// URI to parse
 * @param conf configuration used to resolve the address list
 * @param journalNodeList destination set of host names
 * @throws URISyntaxException if the URI is malformed
 * @throws IOException if the address list cannot be resolved
 */
private static void addJournalNodeHostNames(String journalsUri,
    Configuration conf, Set<String> journalNodeList)
    throws URISyntaxException, IOException {
    URI uri = new URI(journalsUri);
    List<InetSocketAddress> socketAddresses = Util.getAddressesList(uri, conf);
    for (InetSocketAddress is : socketAddresses) {
      journalNodeList.add(is.getHostName());
    }
}
/**
 * Returns the backup node RPC addresses from the configuration, as a map
 * keyed first by nameservice ID and then by namenode ID.
 *
 * @param conf configuration
 * @return map of nameserviceId to (namenodeId to InetSocketAddress)
 * @throws IOException if no backup node address is configured
 */
public static Map<String, Map<String, InetSocketAddress>> getBackupNodeAddresses(
    Configuration conf) throws IOException {
    Map<String, Map<String, InetSocketAddress>> addressList = DFSUtilClient.getAddresses(
        conf, null, DFS_NAMENODE_BACKUP_ADDRESS_KEY);
    if (addressList.isEmpty()) {
      throw new IOException("Incorrect configuration: backup node address "
          + DFS_NAMENODE_BACKUP_ADDRESS_KEY + " is not configured.");
    }
    return addressList;
}
/**
 * Returns the secondary namenode HTTP addresses from the configuration,
 * as a map keyed first by nameservice ID and then by namenode ID.
 *
 * @param conf configuration
 * @return map of nameserviceId to (namenodeId to InetSocketAddress)
 * @throws IOException if no secondary namenode address is configured
 */
public static Map<String, Map<String, InetSocketAddress>> getSecondaryNameNodeAddresses(
    Configuration conf) throws IOException {
    Map<String, Map<String, InetSocketAddress>> addressList = DFSUtilClient.getAddresses(
        conf, null, DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY);
    if (addressList.isEmpty()) {
      throw new IOException("Incorrect configuration: secondary namenode address "
          + DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY + " is not configured.");
    }
    return addressList;
}
/**
 * Returns the namenode addresses from the configuration, as a map keyed
 * first by nameservice ID and then by namenode ID.
 *
 * Returns namenode address specifically configured for datanodes (using
 * service ports), if found. If not, regular RPC address configured for other
 * clients is returned.
 *
 * @param conf configuration
 * @return map of nameserviceId to (namenodeId to InetSocketAddress)
 * @throws IOException if no namenode address is configured
 */
public static Map<String, Map<String, InetSocketAddress>> getNNServiceRpcAddresses(
    Configuration conf) throws IOException {
    // Use default address as fall back
    String defaultAddress;
    try {
      defaultAddress = NetUtils.getHostPortString(
          DFSUtilClient.getNNAddress(conf));
    } catch (IllegalArgumentException e) {
      // Presumably thrown when no usable default NN address can be derived
      // from the configuration -- TODO confirm against getNNAddress.
      defaultAddress = null;
    }
    // Service RPC key takes precedence over the client RPC key.
    Map<String, Map<String, InetSocketAddress>> addressList =
        DFSUtilClient.getAddresses(conf, defaultAddress,
            DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
            DFS_NAMENODE_RPC_ADDRESS_KEY);
    if (addressList.isEmpty()) {
      throw new IOException("Incorrect configuration: namenode address "
          + DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY + " or "
          + DFS_NAMENODE_RPC_ADDRESS_KEY
          + " is not configured.");
    }
    return addressList;
}
/**
 * Returns the namenode addresses of the cluster this node belongs to,
 * as a map keyed first by nameservice ID and then by namenode ID. Note
 * this is to be used by datanodes to get the list of namenode addresses
 * to talk to.
 *
 * Returns namenode address specifically configured for datanodes (using
 * service ports), if found. If not, regular RPC address configured for other
 * clients is returned.
 *
 * @param conf configuration
 * @return map of nameserviceId to (namenodeId to InetSocketAddress)
 * @throws IOException if no namenode address is configured for the
 *         parent nameservices
 */
public static Map<String, Map<String, InetSocketAddress>>
    getNNServiceRpcAddressesForCluster(Configuration conf) throws IOException {
    // Use default address as fall back
    String defaultAddress;
    try {
      defaultAddress = NetUtils.getHostPortString(
          DFSUtilClient.getNNAddress(conf));
    } catch (IllegalArgumentException e) {
      // Presumably thrown when no usable default NN address can be derived
      // from the configuration -- TODO confirm against getNNAddress.
      defaultAddress = null;
    }
    // Restrict the lookup to the nameservices this node is part of.
    Collection<String> parentNameServices = getParentNameServices(conf);
    Map<String, Map<String, InetSocketAddress>> addressList =
        getAddressesForNsIds(conf, parentNameServices,
            defaultAddress,
            DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
            DFS_NAMENODE_RPC_ADDRESS_KEY);
    if (addressList.isEmpty()) {
      throw new IOException("Incorrect configuration: namenode address "
          + DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY + "." + parentNameServices
          + " or "
          + DFS_NAMENODE_RPC_ADDRESS_KEY + "." + parentNameServices
          + " is not configured.");
    }
    return addressList;
}
/**
 * Returns the lifeline RPC server addresses at namenodes from the
 * configuration, as a map keyed first by nameservice ID and then by
 * namenode ID. Only the nameservices this node is part of are consulted.
 *
 * @param conf configuration
 * @return map of nameserviceId to (namenodeId to InetSocketAddress)
 * @throws IOException if a configured internal nameservice is unknown
 */
public static Map<String, Map<String, InetSocketAddress>>
    getNNLifelineRpcAddressesForCluster(Configuration conf)
      throws IOException {
    Collection<String> parentNameServices = getParentNameServices(conf);
    return getAddressesForNsIds(conf, parentNameServices, null,
        DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY);
}
//
/**
 * Returns the configured address for all NameNodes in the cluster.
 * This is similar with DFSUtilClient.getAddressesForNsIds()
 * but can access DFSConfigKeys.
 *
 * @param conf configuration
 * @param nsIds nameservice IDs to look up; an empty collection is treated
 *        as a single null nameservice (see emptyAsSingletonNull)
 * @param defaultAddress default address to return in case key is not found.
 * @param keys Set of keys to look for in the order of preference
 *
 * @return a map(nameserviceId to map(namenodeId to InetSocketAddress))
 */
static Map<String, Map<String, InetSocketAddress>> getAddressesForNsIds(
    Configuration conf, Collection<String> nsIds, String defaultAddress,
    String... keys) {
    // Look for configurations of the form
    // <key>[.<nameserviceId>][.<namenodeId>]
    // across all of the configured nameservices and namenodes.
    Map<String, Map<String, InetSocketAddress>> ret = Maps.newLinkedHashMap();
    for (String nsId : DFSUtilClient.emptyAsSingletonNull(nsIds)) {
      // Per-nameservice switch for resolving hostnames via a pluggable
      // DomainNameResolver instead of direct key lookup.
      String configKeyWithHost =
          DFSConfigKeys.DFS_NAMESERVICES_RESOLUTION_ENABLED + "." + nsId;
      boolean resolveNeeded = conf.getBoolean(configKeyWithHost,
          DFSConfigKeys.DFS_NAMESERVICES_RESOLUTION_ENABLED_DEFAULT);
      Map<String, InetSocketAddress> isas;
      if (resolveNeeded) {
        DomainNameResolver dnr = DomainNameResolverFactory.newInstance(
            conf, nsId, DFSConfigKeys.DFS_NAMESERVICES_RESOLVER_IMPL);
        isas = DFSUtilClient.getResolvedAddressesForNsId(
            conf, nsId, dnr, defaultAddress, keys);
      } else {
        isas = DFSUtilClient.getAddressesForNameserviceId(
            conf, nsId, defaultAddress, keys);
      }
      // Nameservices with no resolvable addresses are omitted entirely.
      if (!isas.isEmpty()) {
        ret.put(nsId, isas);
      }
    }
    return ret;
}
/**
 * Returns the nameservices this node belongs to: the internal nameservices
 * if configured, otherwise all configured nameservices. Every configured
 * internal nameservice must appear in the full nameservice list.
 *
 * @param conf configuration
 * @return the parent nameservice IDs
 * @throws IOException if an internal nameservice is not a known nameservice
 */
private static Collection<String> getParentNameServices(Configuration conf)
    throws IOException {
    Collection<String> parentNameServices = conf.getTrimmedStringCollection(
        DFSConfigKeys.DFS_INTERNAL_NAMESERVICES_KEY);
    if (parentNameServices.isEmpty()) {
      return conf.getTrimmedStringCollection(DFSConfigKeys.DFS_NAMESERVICES);
    }
    // Ensure that each internal service is indeed in the list of all
    // available nameservices.
    Set<String> knownNameServices = new HashSet<>(
        conf.getTrimmedStringCollection(DFSConfigKeys.DFS_NAMESERVICES));
    for (String nsId : parentNameServices) {
      if (!knownNameServices.contains(nsId)) {
        throw new IOException("Unknown nameservice: " + nsId);
      }
    }
    return parentNameServices;
}
/**
 * Map a logical namenode ID to its lifeline address. Use the given
 * nameservice if specified, or the configured one if none is given.
 *
 * @param conf Configuration
 * @param nsId which nameservice nnId is a part of, optional
 * @param nnId the namenode ID to get the service addr for
 * @return the lifeline addr, null if it could not be determined
 */
public static String getNamenodeLifelineAddr(final Configuration conf,
    String nsId, String nnId) {
    final String effectiveNsId =
        (nsId != null) ? nsId : getOnlyNameServiceIdOrNull(conf);
    return conf.get(DFSUtilClient.concatSuffixes(
        DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY,
        effectiveNsId, nnId));
}
/**
 * Flatten the given map, as returned by other functions in this class,
 * into a flat list of {@link ConfiguredNNAddress} instances.
 */
public static List<ConfiguredNNAddress> flattenAddressMap(
    Map<String, Map<String, InetSocketAddress>> map) {
    List<ConfiguredNNAddress> flattened = Lists.newArrayList();
    for (Map.Entry<String, Map<String, InetSocketAddress>> nsEntry
        : map.entrySet()) {
      final String nsId = nsEntry.getKey();
      for (Map.Entry<String, InetSocketAddress> nnEntry
          : nsEntry.getValue().entrySet()) {
        flattened.add(new ConfiguredNNAddress(
            nsId, nnEntry.getKey(), nnEntry.getValue()));
      }
    }
    return flattened;
}
/**
 * Format the given map, as returned by other functions in this class,
 * into a string suitable for debugging display. The format of this string
 * should not be considered an interface, and is liable to change.
 */
public static String addressMapToString(
    Map<String, Map<String, InetSocketAddress>> map) {
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, Map<String, InetSocketAddress>> nsEntry
        : map.entrySet()) {
      sb.append("Nameservice <").append(nsEntry.getKey()).append(">:")
          .append("\n");
      for (Map.Entry<String, InetSocketAddress> nnEntry
          : nsEntry.getValue().entrySet()) {
        sb.append("  NN ID ").append(nnEntry.getKey())
            .append(" => ").append(nnEntry.getValue()).append("\n");
      }
    }
    return sb.toString();
}
/**
 * Returns a human-readable dump of the HA NN RPC addresses found in the
 * configuration, formatted via {@link #addressMapToString}.
 *
 * @param conf configuration to read the addresses from
 * @return formatted string of nameservice/namenode addresses
 */
public static String nnAddressesAsString(Configuration conf) {
    Map<String, Map<String, InetSocketAddress>> addresses =
        DFSUtilClient.getHaNnRpcAddresses(conf);
    return addressMapToString(addresses);
}
/**
* Represent one of the NameNodes configured in the cluster.
*/
public static | StaleAndSlowComparator |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MvelEndpointBuilderFactory.java | {
"start": 1434,
"end": 1553
} | interface ____ {
/**
* Builder for endpoint for the MVEL component.
*/
public | MvelEndpointBuilderFactory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java | {
"start": 2137,
"end": 2383
} | interface ____ {
/**
* @return Information about the bundle of jars used in this plugin
*/
PluginBundle pluginBundle();
/**
* @return The {@link ClassLoader} used to instantiate the main | PluginLayer |
java | micronaut-projects__micronaut-core | context/src/test/groovy/io/micronaut/scheduling/exceptions/ThrowsExceptionJob1.java | {
"start": 996,
"end": 1158
} | class ____ {
@Scheduled(fixedRate = "10ms")
public void runSomething() {
throw new InstantiationException("bad things");
}
}
| ThrowsExceptionJob1 |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnly_Test.java | {
"start": 2577,
"end": 15771
} | class ____ extends MapsBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Entry<String, String>[] entries = array(entry("name", "Yoda"));
// WHEN
var assertionError = expectAssertionError(() -> maps.assertContainsOnly(INFO, null, entries, null));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_fail_if_given_entries_array_is_null() {
// GIVEN
Entry<String, String>[] entries = null;
// WHEN
Throwable thrown = catchThrowable(() -> maps.assertContainsOnly(INFO, actual, entries, null));
// THEN
then(thrown).isInstanceOf(NullPointerException.class).hasMessage(entriesToLookForIsNull());
}
@Test
void should_fail_if_given_entries_array_is_empty() {
// GIVEN
Entry<String, String>[] entries = emptyEntries();
// WHEN
var error = expectAssertionError(() -> maps.assertContainsOnly(INFO, actual, entries, null));
// THEN
then(error).hasMessage(shouldBeEmpty(actual).create());
}
@Test
void should_pass_if_value_type_is_array() {
// GIVEN
Map<String, byte[]> actual = mapOf(entry("key1", new byte[] { 1, 2 }), entry("key2", new byte[] { 3, 4, 5 }));
Entry<String, byte[]>[] expected = array(entry("key2", new byte[] { 3, 4, 5 }), entry("key1", new byte[] { 1, 2 }));
// WHEN/THEN
assertThatNoException().isThrownBy(() -> maps.assertContainsOnly(info, actual, expected, null));
}
@ParameterizedTest
@MethodSource({
"unmodifiableMapsSuccessfulTestCases",
"modifiableMapsSuccessfulTestCases",
"caseInsensitiveMapsSuccessfulTestCases",
})
void should_pass(Map<String, String> actual, Entry<String, String>[] expected) {
// GIVEN
int initialSize = actual.size();
// WHEN/THEN
assertThatNoException().as(actual.getClass().getName())
.isThrownBy(() -> maps.assertContainsOnly(info, actual, expected, null));
then(actual).hasSize(initialSize);
}
private static Stream<Arguments> unmodifiableMapsSuccessfulTestCases() {
return Stream.of(arguments(emptyMap(), emptyEntries()),
arguments(singletonMap("name", "Yoda"),
array(entry("name", "Yoda"))),
arguments(new SingletonMap<>("name", "Yoda"),
array(entry("name", "Yoda"))),
arguments(unmodifiableMap(mapOf(entry("name", "Yoda"), entry("job", "Jedi"))),
array(entry("name", "Yoda"), entry("job", "Jedi"))),
arguments(unmodifiableMap(mapOf(entry("name", "Yoda"), entry("job", "Jedi"))),
array(entry("job", "Jedi"), entry("name", "Yoda"))),
arguments(ImmutableMap.of("name", "Yoda", "job", "Jedi"),
array(entry("name", "Yoda"), entry("job", "Jedi"))),
arguments(ImmutableMap.of("name", "Yoda", "job", "Jedi"),
array(entry("job", "Jedi"), entry("name", "Yoda"))));
}
private static Stream<Arguments> modifiableMapsSuccessfulTestCases() {
return Stream.of(MODIFIABLE_MAPS)
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")),
array(entry("name", "Yoda"), entry("job", "Jedi"))),
arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")),
array(entry("job", "Jedi"), entry("name", "Yoda")))));
}
private static Stream<Arguments> caseInsensitiveMapsSuccessfulTestCases() {
return Stream.of(ArrayUtils.add(CASE_INSENSITIVE_MAPS, CaseInsensitiveMap::new))
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("name", "Yoda"), entry("job", "Jedi"))),
arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("job", "Jedi"), entry("name", "Yoda"))),
arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("Name", "Yoda"), entry("Job", "Jedi"))),
arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("Job", "Jedi"), entry("Name", "Yoda")))));
}
@Test
void should_pass_with_MultiValueMapAdapter() {
// GIVEN
MultiValueMapAdapter<String, String> actual = new MultiValueMapAdapter<>(mapOf(entry("name", list("Yoda"))));
Entry<String, List<String>>[] expected = array(entry("name", list("Yoda")));
int initialSize = actual.size();
// WHEN
maps.assertContainsOnly(info, actual, expected, null);
// THEN
then(actual).hasSize(initialSize);
}
@Test
void should_pass_with_MultivaluedHashMap() {
// GIVEN
MultivaluedHashMap<String, String> actual = new MultivaluedHashMap<>(mapOf(entry("name", "Yoda")));
Entry<String, List<String>>[] expected = array(entry("name", list("Yoda")));
int initialSize = actual.size();
// WHEN
maps.assertContainsOnly(info, actual, expected, null);
// THEN
then(actual).hasSize(initialSize);
}
@ParameterizedTest
@MethodSource({
"unmodifiableMapsFailureTestCases",
"modifiableMapsFailureTestCases",
"caseInsensitiveMapsFailureTestCases",
"commonsCollectionsCaseInsensitiveMapFailureTestCases",
"orderDependentFailureTestCases",
})
void should_fail(Map<String, String> actual, Entry<String, String>[] expected,
Set<Entry<String, String>> notFound, Set<Entry<String, String>> notExpected) {
// GIVEN
int initialSize = actual.size();
// WHEN
assertThatExceptionOfType(AssertionError.class).as(actual.getClass().getName())
.isThrownBy(() -> maps.assertContainsOnly(info, actual, expected, null))
// THEN
.withMessage(shouldContainOnly(actual, expected,
notFound, notExpected).create());
then(actual).hasSize(initialSize);
}
private static Stream<Arguments> unmodifiableMapsFailureTestCases() {
return Stream.of(arguments(emptyMap(),
array(entry("name", "Yoda")),
set(entry("name", "Yoda")),
emptySet()),
arguments(singletonMap("name", "Yoda"),
array(entry("color", "Green")),
set(entry("color", "Green")),
set(entry("name", "Yoda"))),
arguments(new SingletonMap<>("name", "Yoda"),
array(entry("color", "Green")),
set(entry("color", "Green")),
set(entry("name", "Yoda"))),
arguments(unmodifiableMap(mapOf(entry("name", "Yoda"), entry("job", "Jedi"))),
array(entry("name", "Yoda"), entry("color", "Green")),
set(entry("color", "Green")),
set(entry("job", "Jedi"))),
arguments(ImmutableMap.of("name", "Yoda", "job", "Jedi"),
array(entry("name", "Yoda"), entry("color", "Green")),
set(entry("color", "Green")),
set(entry("job", "Jedi"))));
}
private static Stream<Arguments> modifiableMapsFailureTestCases() {
return Stream.of(MODIFIABLE_MAPS)
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("name", "Yoda")),
array(entry("name", "Yoda"), entry("color", "Green")),
set(entry("color", "Green")),
emptySet()),
arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")),
array(entry("name", "Yoda")),
emptySet(),
set(entry("job", "Jedi"))),
arguments(mapOf(supplier, entry("name", "Yoda"), entry("job", "Jedi")),
array(entry("name", "Yoda"), entry("color", "Green")),
set(entry("color", "Green")),
set(entry("job", "Jedi")))));
}
private static Stream<Arguments> caseInsensitiveMapsFailureTestCases() {
return Stream.of(CASE_INSENSITIVE_MAPS)
.flatMap(supplier -> Stream.of(arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("name", "Yoda"), entry("color", "Green")),
set(entry("color", "Green")),
set(entry("Job", "Jedi"))),
arguments(mapOf(supplier, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("Name", "Yoda"), entry("Color", "Green")),
set(entry("Color", "Green")),
set(entry("Job", "Jedi")))));
}
private static Stream<Arguments> commonsCollectionsCaseInsensitiveMapFailureTestCases() {
return Stream.of(arguments(mapOf(CaseInsensitiveMap::new, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("name", "Yoda"), entry("color", "Green")),
set(entry("color", "Green")),
set(entry("job", "Jedi"))), // internal keys are always lowercase
arguments(mapOf(CaseInsensitiveMap::new, entry("NAME", "Yoda"), entry("Job", "Jedi")),
array(entry("Name", "Yoda"), entry("Color", "Green")),
set(entry("Color", "Green")),
set(entry("job", "Jedi")))); // internal keys are always lowercase
}
private static Stream<Arguments> orderDependentFailureTestCases() {
return Stream.of(arguments(mapOf(LinkedHashMap::new, entry("name", "Yoda"), entry("job", "Jedi")),
array(entry("name", "Jedi"), entry("job", "Yoda")),
set(entry("name", "Jedi"), entry("job", "Yoda")),
set(entry("name", "Yoda"), entry("job", "Jedi"))));
}
@Test
void should_fail_with_MultiValueMapAdapter() {
// GIVEN
MultiValueMapAdapter<String, String> actual = new MultiValueMapAdapter<>(mapOf(entry("name", list("Yoda")),
entry("job", list("Jedi"))));
MapEntry<String, List<String>>[] expected = array(entry("name", list("Yoda")), entry("color", list("Green")));
Set<MapEntry<String, List<String>>> notFound = set(entry("color", list("Green")));
Set<MapEntry<String, List<String>>> notExpected = set(entry("job", list("Jedi")));
int initialSize = actual.size();
// WHEN
var error = expectAssertionError(() -> maps.assertContainsOnly(info, actual, expected, null));
// THEN
then(error).hasMessage(shouldContainOnly(actual, expected, notFound, notExpected).create());
then(actual).hasSize(initialSize);
}
@Test
void should_fail_with_MultivaluedHashMap() {
// GIVEN
MultivaluedHashMap<String, String> actual = new MultivaluedHashMap<>(mapOf(entry("name", "Yoda"),
entry("job", "Jedi")));
MapEntry<String, List<String>>[] expected = array(entry("name", list("Yoda")), entry("color", list("Green")));
Set<MapEntry<String, List<String>>> notFound = set(entry("color", list("Green")));
Set<MapEntry<String, List<String>>> notExpected = set(entry("job", list("Jedi")));
int initialSize = actual.size();
// WHEN
var error = expectAssertionError(() -> maps.assertContainsOnly(info, actual, expected, null));
// THEN
then(error).hasMessage(shouldContainOnly(actual, expected, notFound, notExpected).create());
then(actual).hasSize(initialSize);
}
}
| Maps_assertContainsOnly_Test |
java | elastic__elasticsearch | modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/OperationStats.java | {
"start": 582,
"end": 2086
} | class ____ {
final OperationPurpose purpose;
final StorageOperation operation;
/**
* total time taken for the operation
*/
long totalDuration;
/**
* true if last request is completed successfully
*/
boolean isLastRequestSucceed;
/**
* request attempts including retires and multi part requests
*/
int requestAttempts;
/**
* request errors, all unsuccessful request attempts {@code reqErr<=reqAtt}
*/
int requestError;
/**
* request throttles (429), {@code reqErrThrottle<=reqErr}
*/
int requestThrottle;
/**
* request range not satisfied error(416), only applicable for GetObject operations, {@code reqErrRange<=reqErr}
*/
int requestRangeError;
OperationStats(OperationPurpose purpose, StorageOperation operation) {
this.purpose = purpose;
this.operation = operation;
}
@Override
public String toString() {
return "OperationStats{"
+ "purpose="
+ purpose
+ ", operation="
+ operation
+ ", totalDuration="
+ totalDuration
+ ", isLastReqSuccess="
+ isLastRequestSucceed
+ ", reqAtt="
+ requestAttempts
+ ", reqErr="
+ requestError
+ ", reqErrThrottle="
+ requestThrottle
+ ", reqErrRange="
+ requestRangeError
+ '}';
}
}
| OperationStats |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e5/c/ForeignGeneratorViaMapsIdTest.java | {
"start": 680,
"end": 1356
} | class ____ {
@Test
public void testForeignGenerator(SessionFactoryScope scope) {
MetadataImplementor metadata = scope.getMetadataImplementor();
assertThat( SchemaUtil.isColumnPresent( "MedicalHistory", "patient_id", metadata ) ).isTrue();
Person e = new Person();
scope.inTransaction(
session -> {
session.persist( e );
MedicalHistory d = new MedicalHistory();
d.patient = e;
session.persist( d );
session.flush();
session.clear();
d = session.find( MedicalHistory.class, e.id );
assertThat( d.id ).isEqualTo( e.id );
session.remove( d );
session.remove( d.patient );
}
);
}
}
| ForeignGeneratorViaMapsIdTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContains_with_Integer_Arguments_Test.java | {
"start": 1622,
"end": 7474
} | class ____ extends ByteArraysBaseTest {
@Test
void should_pass_if_actual_contains_given_values() {
arrays.assertContains(someInfo(), actual, IntArrays.arrayOf(6));
}
@Test
void should_pass_if_actual_contains_given_values_in_different_order() {
arrays.assertContains(someInfo(), actual, IntArrays.arrayOf(8, 10));
}
@Test
void should_pass_if_actual_contains_all_given_values() {
arrays.assertContains(someInfo(), actual, IntArrays.arrayOf(6, 8, 10));
}
@Test
void should_pass_if_actual_contains_given_values_more_than_once() {
actual = ByteArrays.arrayOf(6, 8, 10, 10, 8);
arrays.assertContains(someInfo(), actual, IntArrays.arrayOf(8));
}
@Test
void should_pass_if_actual_contains_given_values_even_if_duplicated() {
arrays.assertContains(someInfo(), actual, IntArrays.arrayOf(6, 6));
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual = ByteArrays.emptyArray();
arrays.assertContains(someInfo(), actual, IntArrays.emptyArray());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContains(someInfo(), actual,
IntArrays.emptyArray()));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertContains(someInfo(), actual, (int[]) null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContains(someInfo(), null,
IntArrays.arrayOf(8)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_values() {
AssertionInfo info = someInfo();
byte[] expected = { 6, 8, 9 };
Throwable error = catchThrowable(() -> arrays.assertContains(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContain(actual, expected, newLinkedHashSet((byte) 9)));
}
@Test
void should_pass_if_actual_contains_given_values_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContains(someInfo(), actual, IntArrays.arrayOf(6));
}
@Test
void should_pass_if_actual_contains_given_values_in_different_order_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContains(someInfo(), actual, IntArrays.arrayOf(-8, 10));
}
@Test
void should_pass_if_actual_contains_all_given_values_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContains(someInfo(), actual, IntArrays.arrayOf(6, -8, 10));
}
@Test
void should_pass_if_actual_contains_given_values_more_than_once_according_to_custom_comparison_strategy() {
actual = ByteArrays.arrayOf(6, -8, 10, 10, -8);
arraysWithCustomComparisonStrategy.assertContains(someInfo(), actual, IntArrays.arrayOf(-8));
}
@Test
void should_pass_if_actual_contains_given_values_even_if_duplicated_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContains(someInfo(), actual, IntArrays.arrayOf(6, 6));
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
actual,
IntArrays.emptyArray()));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
actual,
(int[]) null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContains(someInfo(),
null,
IntArrays.arrayOf(-8)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_values_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContains(info, actual,
IntArrays.arrayOf(6, -8, 9)));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContain(actual, ByteArrays.arrayOf(6, -8, 9), newLinkedHashSet((byte) 9),
absValueComparisonStrategy));
}
}
| ByteArrays_assertContains_with_Integer_Arguments_Test |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/geo/SpatialPointTests.java | {
"start": 3350,
"end": 3582
} | class ____ to be trivial, when SpatialPoint was a concrete class.
* If we ever revert back to a concrete class, we can simplify this test class.
* The only requirement is that it extends SpatialPoint, but have a different | used |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2000/Issue2086.java | {
"start": 324,
"end": 385
} | class ____ {
public void set() {
}
}
}
| Model |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/ml/TaskType.java | {
"start": 1092,
"end": 1866
} | enum ____ {
REGRESSION("regression"),
CLUSTERING("clustering"),
CLASSIFICATION("classification"),
EMBEDDING("embedding"),
TEXT_GENERATION("text_generation");
private final String name;
TaskType(String name) {
this.name = name;
}
public String getName() {
return name;
}
public static TaskType fromName(String name) {
return Arrays.stream(values())
.filter(taskType -> taskType.name.equals(name))
.findFirst()
.orElseThrow(() -> new IllegalArgumentException("Unknown task type: " + name));
}
public static boolean isValidTaskType(String name) {
return Arrays.stream(values()).anyMatch(taskType -> taskType.name.equals(name));
}
}
| TaskType |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 127485,
"end": 127542
} | interface ____ {}
/* Processor that generates an empty | Foo |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java | {
"start": 14855,
"end": 17502
} | class ____ extends QueryPhaseCollectorManager {
private final PartialHitCountCollector.HitsThresholdChecker hitsThresholdChecker;
private final SortAndFormats sortAndFormats;
EmptyHits(
Weight postFilterWeight,
QueryPhaseCollector.TerminateAfterChecker terminateAfterChecker,
CollectorManager<AggregatorCollector, Void> aggsCollectorManager,
Float minScore,
boolean profile,
@Nullable SortAndFormats sortAndFormats,
int trackTotalHitsUpTo
) {
super(postFilterWeight, terminateAfterChecker, aggsCollectorManager, minScore, profile);
this.sortAndFormats = sortAndFormats;
this.hitsThresholdChecker = new PartialHitCountCollector.HitsThresholdChecker(
trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED ? 0 : trackTotalHitsUpTo
);
}
@Override
protected PartialHitCountCollector newTopDocsCollector() {
return new PartialHitCountCollector(hitsThresholdChecker);
}
@Override
protected TopDocsAndMaxScore reduceTopDocsCollectors(Collection<Collector> collectors) {
int totalHitCount = 0;
boolean earlyTerminated = false;
for (Collector collector : collectors) {
PartialHitCountCollector partialHitCountCollector = (PartialHitCountCollector) collector;
totalHitCount += partialHitCountCollector.getTotalHits();
if (partialHitCountCollector.hasEarlyTerminated()) {
earlyTerminated = true;
}
}
final TotalHits totalHits = new TotalHits(
totalHitCount,
earlyTerminated ? TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO : TotalHits.Relation.EQUAL_TO
);
final TopDocs topDocs;
if (sortAndFormats != null) {
topDocs = new TopFieldDocs(totalHits, Lucene.EMPTY_SCORE_DOCS, sortAndFormats.sort.getSort());
} else {
topDocs = new TopDocs(totalHits, Lucene.EMPTY_SCORE_DOCS);
}
return new TopDocsAndMaxScore(topDocs, Float.NaN);
}
@Override
protected String getTopDocsProfilerReason() {
return CollectorResult.REASON_SEARCH_COUNT;
}
@Override
protected DocValueFormat[] getSortValueFormats() {
return null;
}
}
/**
* Collector manager used when size is greater than zero, meaning hits need to be collected.
*/
private static | EmptyHits |
java | resilience4j__resilience4j | resilience4j-spring/src/test/java/io/github/resilience4j/timelimiter/configure/TimeLimiterInitializationInAspectTest.java | {
"start": 1098,
"end": 2023
} | class ____ {
@Bean
public TimeLimiterRegistry timeLimiterRegistry() {
TimeLimiterConfig timeLimiterConfig = TimeLimiterConfig.custom()
.timeoutDuration(Duration.ofSeconds(3))
.build();
return TimeLimiterRegistry.of(
Map.of(BACKEND, timeLimiterConfig)
);
}
}
@Autowired
TimeLimiterDummyService testDummyService;
@Autowired
TimeLimiterRegistry registry;
@Before
public void setUp() {
// ensure no time limiters are initialized
assertThat(registry.getAllTimeLimiters()).isEmpty();
}
@Test
public void testCorrectConfigIsUsedInAspect() throws Exception {
// Should not time out because the time limit is 3 seconds
assertThat(testDummyService.success().toCompletableFuture().get())
.isEqualTo("ok");
}
}
| TestConfig |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/launch/JarLauncher.java | {
"start": 1046,
"end": 1326
} | class ____ extends ExecutableArchiveLauncher {
public JarLauncher() throws Exception {
}
protected JarLauncher(Archive archive) throws Exception {
super(archive);
}
public static void main(String[] args) throws Exception {
new JarLauncher().launch(args);
}
}
| JarLauncher |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/ImageHeaderParserUtils.java | {
"start": 11866,
"end": 11981
} | interface ____ {
ImageType getTypeAndRewind(ImageHeaderParser parser) throws IOException;
}
private | TypeReader |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/Mockito.java | {
"start": 115526,
"end": 115977
} | interface ____ which static mocks should be mocked.
* @param mockSettings the settings to use where only name and default answer are considered.
* @return mock controller
*/
public static <T> MockedStatic<T> mockStatic(Class<T> classToMock, MockSettings mockSettings) {
return MOCKITO_CORE.mockStatic(classToMock, mockSettings);
}
/**
* Creates a thread-local mock controller for all static methods of the given | of |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/cli/EnvironmentAwareCommand.java | {
"start": 1332,
"end": 6476
} | class ____ extends Command {
private static final String DOCKER_UPPERCASE_SETTING_PREFIX = "ES_SETTING_";
private static final Pattern DOCKER_LOWERCASE_SETTING_REGEX = Pattern.compile("[-a-z0-9_]+(\\.[-a-z0-9_]+)+");
private final OptionSpec<KeyValuePair> settingOption;
/**
* Construct the command with the specified command description. This command will have logging configured without reading Elasticsearch
* configuration files.
*
* @param description the command description
*/
public EnvironmentAwareCommand(final String description) {
super(description);
this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class);
}
@Override
protected void execute(Terminal terminal, OptionSet options, ProcessInfo processInfo) throws Exception {
execute(terminal, options, createEnv(options, processInfo), processInfo);
}
private static void putDockerEnvSettings(Map<String, String> settings, Map<String, String> envVars) {
for (var envVar : envVars.entrySet()) {
String key = envVar.getKey();
if (DOCKER_LOWERCASE_SETTING_REGEX.matcher(key).matches()) {
// all lowercase, like cluster.name, so just put directly
settings.put(key, envVar.getValue());
} else if (key.startsWith(DOCKER_UPPERCASE_SETTING_PREFIX)) {
// remove prefix
key = key.substring(DOCKER_UPPERCASE_SETTING_PREFIX.length());
// insert dots for underscores
key = key.replace('_', '.');
// unescape double dots, which were originally double underscores
key = key.replace("..", "_");
// lowercase the whole thing
key = key.toLowerCase(Locale.ROOT);
settings.put(key, envVar.getValue());
}
}
}
/** Create an {@link Environment} for the command to use. Overrideable for tests. */
protected Environment createEnv(OptionSet options, ProcessInfo processInfo) throws UserException {
final Map<String, String> settings = new HashMap<>();
for (final KeyValuePair kvp : settingOption.values(options)) {
if (kvp.value.isEmpty()) {
throw new UserException(ExitCodes.USAGE, "setting [" + kvp.key + "] must not be empty");
}
if (settings.containsKey(kvp.key)) {
final String message = String.format(Locale.ROOT, "setting [%s] set twice via command line -E", kvp.key);
throw new UserException(ExitCodes.USAGE, message);
}
settings.put(kvp.key, kvp.value);
}
if (getBuildType() == Build.Type.DOCKER) {
putDockerEnvSettings(settings, processInfo.envVars());
}
putSystemPropertyIfSettingIsMissing(processInfo.sysprops(), settings, "path.data", "es.path.data");
putSystemPropertyIfSettingIsMissing(processInfo.sysprops(), settings, "path.home", "es.path.home");
putSystemPropertyIfSettingIsMissing(processInfo.sysprops(), settings, "path.logs", "es.path.logs");
final String esPathConf = processInfo.sysprops().get("es.path.conf");
if (esPathConf == null) {
throw new UserException(ExitCodes.CONFIG, "the system property [es.path.conf] must be set");
}
return InternalSettingsPreparer.prepareEnvironment(
Settings.EMPTY,
settings,
getConfigPath(esPathConf),
// HOSTNAME is set by elasticsearch-env and elasticsearch-env.bat so it is always available
() -> processInfo.envVars().get("HOSTNAME")
);
}
// protected to allow tests to override
protected Build.Type getBuildType() {
return Build.current().type();
}
@SuppressForbidden(reason = "need path to construct environment")
private static Path getConfigPath(final String pathConf) {
return Paths.get(pathConf);
}
/** Ensure the given setting exists, reading it from system properties if not already set. */
private static void putSystemPropertyIfSettingIsMissing(
final Map<String, String> sysprops,
final Map<String, String> settings,
final String setting,
final String key
) throws UserException {
final String value = sysprops.get(key);
if (value != null) {
if (settings.containsKey(setting)) {
final String message = String.format(
Locale.ROOT,
"setting [%s] found via command-line -E and system property [%s]",
setting,
key
);
throw new UserException(ExitCodes.USAGE, message);
} else {
settings.put(setting, value);
}
}
}
/** Execute the command with the initialized {@link Environment}. */
public abstract void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception;
}
| EnvironmentAwareCommand |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-error/src/main/java/org/apache/maven/plugin/coreit/NoClassDefFoundErrorComponentMojo.java | {
"start": 1230,
"end": 1409
} | class ____ won't be
* loadable when that dependency is missing (in the runtime environment).
*/
@Mojo(name = "no-class-def-found-error-param", requiresProject = false)
public | itself |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/EqlUsageTransportAction.java | {
"start": 1406,
"end": 2892
} | class ____ extends XPackUsageFeatureTransportAction {
private final Client client;
@Inject
public EqlUsageTransportAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
Client client
) {
super(XPackUsageFeatureAction.EQL.name(), transportService, clusterService, threadPool, actionFilters);
this.client = client;
}
@Override
protected void localClusterStateOperation(
Task task,
XPackUsageRequest request,
ClusterState state,
ActionListener<XPackUsageFeatureResponse> listener
) {
EqlStatsRequest eqlRequest = new EqlStatsRequest();
eqlRequest.includeStats(true);
eqlRequest.setParentTask(clusterService.localNode().getId(), task.getId());
client.execute(EqlStatsAction.INSTANCE, eqlRequest, listener.delegateFailureAndWrap((delegate, r) -> {
List<Counters> countersPerNode = r.getNodes()
.stream()
.map(EqlStatsResponse.NodeStatsResponse::getStats)
.filter(Objects::nonNull)
.collect(Collectors.toList());
Counters mergedCounters = Counters.merge(countersPerNode);
EqlFeatureSetUsage usage = new EqlFeatureSetUsage(mergedCounters.toNestedMap());
delegate.onResponse(new XPackUsageFeatureResponse(usage));
}));
}
}
| EqlUsageTransportAction |
java | quarkusio__quarkus | extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/security/SecurityEventUtil.java | {
"start": 1491,
"end": 11891
} | class ____ {
public static final String QUARKUS_SECURITY_NAMESPACE = "quarkus.security.";
public static final String AUTHN_SUCCESS_EVENT_NAME = QUARKUS_SECURITY_NAMESPACE + "authentication.success";
public static final String AUTHN_FAILURE_EVENT_NAME = QUARKUS_SECURITY_NAMESPACE + "authentication.failure";
public static final String AUTHZ_SUCCESS_EVENT_NAME = QUARKUS_SECURITY_NAMESPACE + "authorization.success";
public static final String AUTHZ_FAILURE_EVENT_NAME = QUARKUS_SECURITY_NAMESPACE + "authorization.failure";
public static final String OTHER_EVENT_NAME = QUARKUS_SECURITY_NAMESPACE + "other";
public static final String SECURITY_IDENTITY_PRINCIPAL = QUARKUS_SECURITY_NAMESPACE + "identity.principal";
public static final String SECURITY_IDENTITY_IS_ANONYMOUS = QUARKUS_SECURITY_NAMESPACE + "identity.anonymous";
public static final String QUARKUS_SECURITY_OTHER_EVENTS_NAMESPACE = QUARKUS_SECURITY_NAMESPACE + "other.";
public static final String FAILURE_NAME = QUARKUS_SECURITY_NAMESPACE + "failure.name";
public static final String AUTHORIZATION_CONTEXT = QUARKUS_SECURITY_NAMESPACE + "authorization.context";
private SecurityEventUtil() {
// UTIL CLASS
}
/**
* Adds Span attributes describing authenticated user if the user is authenticated and CDI request context is active.
* This will be true for example inside JAX-RS resources when the CDI request context is already setup and user code
* creates a new Span.
*
* @param span valid and recording Span; must not be null
*/
static void addEndUserAttributes(Span span) {
if (Arc.container().requestContext().isActive()) {
var currentVertxRequest = Arc.container().instance(CurrentVertxRequest.class).get();
if (currentVertxRequest.getCurrent() != null) {
addEndUserAttribute(currentVertxRequest.getCurrent(), span);
}
}
}
/**
* Updates authenticated user Span attributes if the {@link SecurityIdentity} got augmented during authorization.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*
* @param event {@link AuthorizationFailureEvent}
*/
public static void updateEndUserAttributes(AuthorizationFailureEvent event) {
addEndUserAttribute(event.getSecurityIdentity(), getSpan());
}
/**
* Updates authenticated user Span attributes if the {@link SecurityIdentity} got augmented during authorization.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*
* @param event {@link AuthorizationSuccessEvent}
*/
public static void updateEndUserAttributes(AuthorizationSuccessEvent event) {
addEndUserAttribute(event.getSecurityIdentity(), getSpan());
}
/**
* If there is already valid recording {@link Span}, attributes describing authenticated user are added to it.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*
* @param event {@link AuthenticationSuccessEvent}
*/
public static void addEndUserAttributes(AuthenticationSuccessEvent event) {
addEndUserAttribute(event.getSecurityIdentity(), getSpan());
}
/**
* Adds {@link SecurityEvent} as Span event.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*/
public static void addAllEvents(SecurityEvent event) {
if (event instanceof AuthenticationSuccessEvent e) {
addEvent(e);
} else if (event instanceof AuthenticationFailureEvent e) {
addEvent(e);
} else if (event instanceof AuthorizationSuccessEvent e) {
addEvent(e);
} else if (event instanceof AuthorizationFailureEvent e) {
addEvent(e);
} else {
addOtherEventInternal(event);
}
}
/**
* Adds {@link AuthenticationSuccessEvent} as Span event.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*/
public static void addEvent(AuthenticationSuccessEvent event) {
addEvent(AUTHN_SUCCESS_EVENT_NAME, attributesBuilder(event).build());
}
/**
* Adds {@link AuthenticationFailureEvent} as Span event.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*/
public static void addEvent(AuthenticationFailureEvent event) {
addEvent(AUTHN_FAILURE_EVENT_NAME, attributesBuilder(event, AUTHENTICATION_FAILURE_KEY).build());
}
/**
* Adds {@link AuthorizationSuccessEvent} as Span event.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*/
public static void addEvent(AuthorizationSuccessEvent event) {
addEvent(AUTHZ_SUCCESS_EVENT_NAME,
withAuthorizationContext(event, attributesBuilder(event), AuthorizationSuccessEvent.AUTHORIZATION_CONTEXT));
}
/**
* Adds {@link AuthorizationFailureEvent} as Span event.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*/
public static void addEvent(AuthorizationFailureEvent event) {
addEvent(AUTHZ_FAILURE_EVENT_NAME, withAuthorizationContext(event, attributesBuilder(event, AUTHORIZATION_FAILURE_KEY),
AuthorizationFailureEvent.AUTHORIZATION_CONTEXT_KEY));
}
/**
* Adds {@link SecurityEvent} as Span event that is not authN/authZ success/failure.
*
* WARNING: This method is called from synthetic method observer. Any renaming must be reflected in the TracerProcessor.
*/
public static void addEvent(SecurityEvent event) {
if (!(event instanceof AuthenticationSuccessEvent || event instanceof AuthenticationFailureEvent
|| event instanceof AuthorizationSuccessEvent || event instanceof AuthorizationFailureEvent)) {
addOtherEventInternal(event);
}
}
private static void addOtherEventInternal(SecurityEvent event) {
var builder = attributesBuilder(event);
// add all event properties that are string, for example OIDC authentication server URL
event.getEventProperties().forEach(new BiConsumer<String, Object>() {
@Override
public void accept(String key, Object value) {
if (value instanceof String str) {
builder.put(QUARKUS_SECURITY_OTHER_EVENTS_NAMESPACE + key, str);
}
}
});
addEvent(OTHER_EVENT_NAME, builder.build());
}
private static void addEvent(String eventName, Attributes attributes) {
Span span = getSpan();
if (spanIsValidAndRecording(span)) {
span.addEvent(eventName, attributes, Instant.now());
}
}
private static AttributesBuilder attributesBuilder(SecurityEvent event, String failureKey) {
if (event.getEventProperties().get(failureKey) instanceof Throwable failure) {
return attributesBuilder(event).put(FAILURE_NAME, failure.getClass().getName());
}
return attributesBuilder(event);
}
private static AttributesBuilder attributesBuilder(SecurityEvent event) {
var builder = Attributes.builder();
SecurityIdentity identity = event.getSecurityIdentity();
if (identity != null) {
builder.put(SECURITY_IDENTITY_IS_ANONYMOUS, identity.isAnonymous());
if (identity.getPrincipal() != null) {
builder.put(SECURITY_IDENTITY_PRINCIPAL, identity.getPrincipal().getName());
}
}
return builder;
}
private static Attributes withAuthorizationContext(SecurityEvent event, AttributesBuilder builder, String contextKey) {
if (event.getEventProperties().containsKey(contextKey)) {
builder.put(AUTHORIZATION_CONTEXT, (String) event.getEventProperties().get(contextKey));
}
return builder.build();
}
/**
* Adds Span attributes describing the authenticated user.
*
* @param event {@link RoutingContext}; must not be null
* @param span valid recording Span; must not be null
*/
private static void addEndUserAttribute(RoutingContext event, Span span) {
if (event.user() instanceof QuarkusHttpUser user) {
addEndUserAttribute(user.getSecurityIdentity(), span);
}
}
/**
* Adds End User attributes to the {@code span}. Only authenticated user is added to the {@link Span}.
* Anonymous identity is ignored as it does not represent authenticated user.
* Passed {@code securityIdentity} is attached to the {@link Context} so that we recognize when identity changes.
*
* @param securityIdentity SecurityIdentity
* @param span Span
*/
private static void addEndUserAttribute(SecurityIdentity securityIdentity, Span span) {
if (securityIdentity != null && !securityIdentity.isAnonymous() && spanIsValidAndRecording(span)) {
span.setAllAttributes(Attributes.of(
ENDUSER_ID,
securityIdentity.getPrincipal().getName(),
ENDUSER_ROLE,
getRoles(securityIdentity)));
}
}
private static String getRoles(SecurityIdentity securityIdentity) {
try {
return securityIdentity.getRoles().toString();
} catch (UnsupportedOperationException e) {
// getting roles is not supported when the identity is enhanced by custom jakarta.ws.rs.core.SecurityContext
return "";
}
}
private static Span getSpan() {
if (Arc.container().requestContext().isActive()) {
return Arc.container().select(Span.class).get();
} else {
return Span.current();
}
}
private static boolean spanIsValidAndRecording(Span span) {
return span.isRecording() && span.getSpanContext().isValid();
}
}
| SecurityEventUtil |
java | apache__flink | flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonFormatOptions.java | {
"start": 4976,
"end": 5086
} | enum ____ {
FAIL,
DROP,
LITERAL
}
private JsonFormatOptions() {}
}
| MapNullKeyMode |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ConstantOverflowTest.java | {
"start": 2699,
"end": 2963
} | class ____ {
public static final int a = (int) (10 / 0.5);
}
""")
.doTest();
}
@Test
public void negativeCharCast() {
testHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingWithManyToOne.java | {
"start": 1578,
"end": 1867
} | class ____ {
@Id
@Column(name = "ID", nullable = false)
@SequenceGenerator(name = "ID", sequenceName = "ADDRESS_SEQ")
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ID")
private Long id;
private String street;
}
@Entity(name = "Person")
public static | Address |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/transaction/TimedTransactionalSpringExtensionTests.java | {
"start": 2493,
"end": 3120
} | class ____ {
@Test
void springTransactionsWorkWithJUnitJupiterTimeouts() {
Events events = EngineTestKit.engine("junit-jupiter")
.selectors(selectClass(TestCase.class))
.execute()
.testEvents()
.assertStatistics(stats -> stats.started(4).succeeded(2).failed(2));
events.failed().assertThatEvents().haveExactly(2,
event(test("WithExceededJUnitJupiterTimeout"),
finishedWithFailure(
instanceOf(TimeoutException.class),
message(msg -> msg.endsWith("timed out after 10 milliseconds")))));
}
@SpringJUnitConfig
@Transactional
@FailingTestCase
static | TimedTransactionalSpringExtensionTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEvent.java | {
"start": 917,
"end": 1083
} | class ____ extends
AbstractEvent<NodeLabelsStoreEventType> {
public NodeLabelsStoreEvent(NodeLabelsStoreEventType type) {
super(type);
}
} | NodeLabelsStoreEvent |
java | processing__processing4 | java/test/processing/mode/java/LibrarySearchRuntimePathFactoryTest.java | {
"start": 1023,
"end": 2024
} | class ____ {
private RuntimePathBuilder.RuntimePathFactoryStrategy factory;
private JavaMode testMode;
private List<ImportStatement> testImports;
private Sketch testSketch;
private List<String> classpath;
@Before
public void setUp() throws Exception {
RuntimePathBuilder builder = new RuntimePathBuilder();
factory = builder::buildLibrarySearchPath;
testMode = RuntimePathFactoryTestUtil.createTestJavaMode();
testImports = RuntimePathFactoryTestUtil.createTestImports();
testSketch = RuntimePathFactoryTestUtil.createTestSketch();
classpath = factory.buildClasspath(testMode, testImports, testSketch);
}
@Test
public void testBuildClasspathSize() {
assertEquals(3, classpath.size());
}
@Test
public void testBuildClasspathValues() {
assertTrue(classpath.get(0).contains("library3"));
assertTrue(classpath.get(1).contains("java.library4"));
assertTrue(classpath.get(2).contains("library5"));
}
} | LibrarySearchRuntimePathFactoryTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/exec/spi/LoadedValuesCollector.java | {
"start": 2141,
"end": 2875
} | interface ____ {
NavigablePath navigablePath();
ModelPart modelPart();
}
/**
* Details about a loaded entity.
*/
record LoadedEntityRegistration(
NavigablePath navigablePath,
EntityMappingType entityDescriptor,
EntityKey entityKey) implements LoadedPartRegistration {
@Override
public EntityMappingType modelPart() {
return entityDescriptor();
}
}
/**
* Details about a loaded collection.
*/
record LoadedCollectionRegistration(
NavigablePath navigablePath,
PluralAttributeMapping collectionDescriptor,
CollectionKey collectionKey) implements LoadedPartRegistration {
@Override
public PluralAttributeMapping modelPart() {
return collectionDescriptor();
}
}
}
| LoadedPartRegistration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ondelete/toone/hbm/GrandChild.java | {
"start": 186,
"end": 456
} | class ____ {
private Long id;
private Child parent;
public Child getParent() {
return parent;
}
public void setParent(Child parent) {
this.parent = parent;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
| GrandChild |
java | quarkusio__quarkus | integration-tests/jpa-postgresql-withxml/src/main/java/io/quarkus/it/jpa/postgresql/otherpu/XmlFormatMapper.java | {
"start": 355,
"end": 826
} | class ____ implements FormatMapper {
@Override
public <T> T fromString(CharSequence charSequence, JavaType<T> javaType, WrapperOptions wrapperOptions) {
throw new UnsupportedOperationException("I cannot convert anything from XML.");
}
@Override
public <T> String toString(T value, JavaType<T> javaType, WrapperOptions wrapperOptions) {
throw new UnsupportedOperationException("I cannot convert anything to XML.");
}
}
| XmlFormatMapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/joinfetch/JoinFetchTest.java | {
"start": 1368,
"end": 8780
} | class ____ {
@Test
public void testJoinFetch(SessionFactoryScope scope) {
scope.inTransaction( (s) -> {
s.createQuery( "delete from Bid" ).executeUpdate();
s.createQuery( "delete from Comment" ).executeUpdate();
s.createQuery( "delete from Item" ).executeUpdate();
} );
Category cat = new Category( "Photography" );
Item i = new Item( cat, "Camera" );
Bid b = new Bid( i, 100.0f );
new Bid( i, 105.0f );
new Comment( i, "This looks like a really good deal" );
new Comment( i, "Is it the latest version?" );
new Comment( i, "<comment deleted>" );
scope.inTransaction( (s) -> {
s.persist( cat );
s.persist( i );
} );
scope.getSessionFactory().getCache().evictEntityData( Item.class );
scope.inTransaction( (s) -> {
Item i1 = s.get( Item.class, i.getId() );
assertFalse( Hibernate.isInitialized( i1.getBids() ) );
assertEquals( i1.getBids().size(), 2 );
assertFalse( Hibernate.isInitialized( i1.getComments() ) );
assertEquals( 3, i1.getComments().size() );
} );
scope.getSessionFactory().getCache().evictEntityData( Bid.class );
scope.inTransaction( (s) -> {
Bid b1 = s.get( Bid.class, b.getId() );
assertFalse( Hibernate.isInitialized( b1.getItem() ) );
assertFalse( Hibernate.isInitialized( b1.getItem().getComments() ) );
assertEquals( 3, b1.getItem().getComments().size() );
} );
scope.getSessionFactory().getCache().evictCollectionData( Item.class.getName() + ".bids" );
scope.inTransaction( (s) -> {
CriteriaBuilder criteriaBuilder = s.getCriteriaBuilder();
CriteriaQuery<Item> criteria = criteriaBuilder.createQuery( Item.class );
Root<Item> root = criteria.from( Item.class );
root.join( "bids" );
root.join( "comments" );
Item i1 = s.createQuery( criteria ).uniqueResult();
// Item i1 = (Item) s.createCriteria( Item.class )from
// .setFetchMode( "bids", FetchMode.SELECT )
// .setFetchMode( "comments", FetchMode.SELECT )
// .uniqueResult();
assertFalse( Hibernate.isInitialized( i1.getBids() ) );
assertFalse( Hibernate.isInitialized( i1.getComments() ) );
Bid b1 = (Bid) i1.getBids().iterator().next();
assertTrue( Hibernate.isInitialized( b1.getItem() ) );
} );
scope.inTransaction( (s) -> {
Item i1 = (Item) s.createQuery( "from Item i left join fetch i.bids left join fetch i.comments" )
.uniqueResult();
assertTrue( Hibernate.isInitialized( i1.getBids() ) );
assertTrue( Hibernate.isInitialized( i1.getComments() ) );
assertEquals( 3, i1.getComments().size() );
assertEquals( 2, i1.getBids().size() );
} );
scope.inTransaction( (s) -> {
Item i1 = (Item) s.getNamedQuery( Item.class.getName() + ".all" ).list().get( 0 );
assertTrue( Hibernate.isInitialized( i1.getBids() ) );
assertTrue( Hibernate.isInitialized( i1.getComments() ) );
assertEquals( 3, i1.getComments().size() );
assertEquals( 2, i1.getBids().size() );
} );
scope.inTransaction( (s) -> {
CriteriaBuilder criteriaBuilder = s.getCriteriaBuilder();
CriteriaQuery<Item> criteria = criteriaBuilder.createQuery( Item.class );
criteria.from( Item.class );
Item i1 = s.createQuery( criteria ).uniqueResult();
assertFalse( Hibernate.isInitialized( i1.getBids() ) );
assertFalse( Hibernate.isInitialized( i1.getComments() ) );
assertEquals( 3, i1.getComments().size() );
assertEquals( 2, i1.getBids().size() );
} );
scope.inTransaction( (s) -> {
List bids = s.createQuery( "select b from Bid b left join fetch b.item i left join fetch i.category" )
.list();
Bid bid = (Bid) bids.get( 0 );
assertTrue( Hibernate.isInitialized( bid.getItem() ) );
assertTrue( Hibernate.isInitialized( bid.getItem().getCategory() ) );
} );
scope.inTransaction( (s) -> {
List pairs = s.createQuery( "select i from Item i left join i.bids b left join fetch i.category" ).list();
Item item = (Item) pairs.get( 0 );
assertFalse( Hibernate.isInitialized( item.getBids() ) );
assertTrue( Hibernate.isInitialized( item.getCategory() ) );
s.clear();
pairs = s.createQuery( "select i, b from Item i left join i.bids b left join i.category" ).list();
item = (Item) ( (Object[]) pairs.get( 0 ) )[0];
assertFalse( Hibernate.isInitialized( item.getBids() ) );
assertFalse( Hibernate.isInitialized( item.getCategory() ) );
s.clear();
pairs = s.createQuery( "select i from Item i left join i.bids b left join i.category" ).list();
item = (Item) pairs.get( 0 );
assertFalse( Hibernate.isInitialized( item.getBids() ) );
assertFalse( Hibernate.isInitialized( item.getCategory() ) );
s.clear();
pairs = s.createQuery( "select b, i from Bid b left join b.item i left join fetch i.category" ).list();
Bid bid = (Bid) ( (Object[]) pairs.get( 0 ) )[0];
assertTrue( Hibernate.isInitialized( bid.getItem() ) );
assertTrue( Hibernate.isInitialized( bid.getItem().getCategory() ) );
s.clear();
pairs = s.createQuery( "select b, i from Bid b left join b.item i left join i.category" ).list();
bid = (Bid) ( (Object[]) pairs.get( 0 ) )[0];
assertTrue( Hibernate.isInitialized( bid.getItem() ) );
assertFalse( Hibernate.isInitialized( bid.getItem().getCategory() ) );
pairs = s.createQuery( "select b from Bid b left join b.item i left join i.category" ).list();
bid = (Bid) pairs.get( 0 );
assertTrue( Hibernate.isInitialized( bid.getItem() ) );
assertFalse( Hibernate.isInitialized( bid.getItem().getCategory() ) );
} );
scope.inTransaction( (s) -> {
s.createQuery( "delete from Bid" ).executeUpdate();
s.createQuery( "delete from Comment" ).executeUpdate();
s.createQuery( "delete from Item" ).executeUpdate();
s.createQuery( "delete from Category" ).executeUpdate();
} );
}
@Test
public void testJoinFetchManyToMany(SessionFactoryScope scope) {
Group group = new Group( "hibernate" );
scope.inTransaction( (s) -> {
User gavin = new User( "gavin" );
User max = new User( "max" );
group.getUsers().put( "gavin", gavin );
group.getUsers().put( "max", max );
gavin.getGroups().put( "hibernate", group );
max.getGroups().put( "hibernate", group );
s.persist( group );
} );
scope.inTransaction( (s) -> {
Group hb = s.get( Group.class, "hibernate" );
assertFalse( Hibernate.isInitialized( hb.getUsers() ) );
User gavin = (User) hb.getUsers().get( "gavin" );
assertFalse( Hibernate.isInitialized( gavin.getGroups() ) );
User max = s.get( User.class, "max" );
assertFalse( Hibernate.isInitialized( max.getGroups() ) );
} );
scope.inTransaction( (s) -> {
CriteriaBuilder criteriaBuilder = s.getCriteriaBuilder();
CriteriaQuery<Group> criteria = criteriaBuilder.createQuery( Group.class );
Root<Group> from = criteria.from( Group.class );
from.fetch( "users", JoinType.LEFT ).fetch( "groups" );
Group hb = s.createQuery( criteria ).uniqueResult();
// hb = (Group) s.createCriteria( Group.class )
// .setFetchMode( "users", FetchMode.JOIN )
// .setFetchMode( "users.groups", FetchMode.JOIN )
// .uniqueResult();
assertTrue( Hibernate.isInitialized( hb.getUsers() ) );
User gavin = (User) hb.getUsers().get( "gavin" );
assertTrue( Hibernate.isInitialized( gavin.getGroups() ) );
User max = s.get( User.class, "max" );
assertTrue( Hibernate.isInitialized( max.getGroups() ) );
} );
scope.inTransaction( (s) -> s.remove( group ) );
}
}
| JoinFetchTest |
java | apache__logging-log4j2 | log4j-api-java9/src/main/java/org/apache/logging/log4j/util/internal/SerializationUtil.java | {
"start": 943,
"end": 1204
} | class ____ {
public static final List<String> REQUIRED_JAVA_CLASSES = List.of();
public static final List<String> REQUIRED_JAVA_PACKAGES = List.of();
public static String stripArray(final Class<?> clazz) {
return null;
}
}
| SerializationUtil |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/client/TransportClient.java | {
"start": 3168,
"end": 12791
} | class ____ implements Closeable {
private static final SparkLogger logger = SparkLoggerFactory.getLogger(TransportClient.class);
private final Channel channel;
private final TransportResponseHandler handler;
@Nullable private String clientId;
private volatile boolean timedOut;
public TransportClient(Channel channel, TransportResponseHandler handler) {
this.channel = Objects.requireNonNull(channel);
this.handler = Objects.requireNonNull(handler);
this.timedOut = false;
}
public Channel getChannel() {
return channel;
}
public boolean isActive() {
return !timedOut && (channel.isOpen() || channel.isActive());
}
public SocketAddress getSocketAddress() {
return channel.remoteAddress();
}
/**
* Returns the ID used by the client to authenticate itself when authentication is enabled.
*
* @return The client ID, or null if authentication is disabled.
*/
public String getClientId() {
return clientId;
}
/**
* Sets the authenticated client ID. This is meant to be used by the authentication layer.
*
* Trying to set a different client ID after it's been set will result in an exception.
*/
public void setClientId(String id) {
JavaUtils.checkState(clientId == null, "Client ID has already been set.");
this.clientId = id;
}
/**
* Requests a single chunk from the remote side, from the pre-negotiated streamId.
*
* Chunk indices go from 0 onwards. It is valid to request the same chunk multiple times, though
* some streams may not support this.
*
* Multiple fetchChunk requests may be outstanding simultaneously, and the chunks are guaranteed
* to be returned in the same order that they were requested, assuming only a single
* TransportClient is used to fetch the chunks.
*
* @param streamId Identifier that refers to a stream in the remote StreamManager. This should
* be agreed upon by client and server beforehand.
* @param chunkIndex 0-based index of the chunk to fetch
* @param callback Callback invoked upon successful receipt of chunk, or upon any failure.
*/
public void fetchChunk(
long streamId,
int chunkIndex,
ChunkReceivedCallback callback) {
if (logger.isDebugEnabled()) {
logger.debug("Sending fetch chunk request {} to {}", chunkIndex, getRemoteAddress(channel));
}
StreamChunkId streamChunkId = new StreamChunkId(streamId, chunkIndex);
StdChannelListener listener = new StdChannelListener(streamChunkId) {
@Override
void handleFailure(String errorMsg, Throwable cause) {
handler.removeFetchRequest(streamChunkId);
callback.onFailure(chunkIndex, new IOException(errorMsg, cause));
}
};
handler.addFetchRequest(streamChunkId, callback);
channel.writeAndFlush(new ChunkFetchRequest(streamChunkId)).addListener(listener);
}
/**
* Request to stream the data with the given stream ID from the remote end.
*
* @param streamId The stream to fetch.
* @param callback Object to call with the stream data.
*/
public void stream(String streamId, StreamCallback callback) {
StdChannelListener listener = new StdChannelListener(streamId) {
@Override
void handleFailure(String errorMsg, Throwable cause) throws Exception {
callback.onFailure(streamId, new IOException(errorMsg, cause));
}
};
if (logger.isDebugEnabled()) {
logger.debug("Sending stream request for {} to {}", streamId, getRemoteAddress(channel));
}
// Need to synchronize here so that the callback is added to the queue and the RPC is
// written to the socket atomically, so that callbacks are called in the right order
// when responses arrive.
synchronized (this) {
handler.addStreamCallback(streamId, callback);
channel.writeAndFlush(new StreamRequest(streamId)).addListener(listener);
}
}
/**
* Sends an opaque message to the RpcHandler on the server-side. The callback will be invoked
* with the server's response or upon any failure.
*
* @param message The message to send.
* @param callback Callback to handle the RPC's reply.
* @return The RPC's id.
*/
public long sendRpc(ByteBuffer message, RpcResponseCallback callback) {
if (logger.isTraceEnabled()) {
logger.trace("Sending RPC to {}", getRemoteAddress(channel));
}
long requestId = requestId();
handler.addRpcRequest(requestId, callback);
RpcChannelListener listener = new RpcChannelListener(requestId, callback);
channel.writeAndFlush(new RpcRequest(requestId, new NioManagedBuffer(message)))
.addListener(listener);
return requestId;
}
/**
* Sends a MergedBlockMetaRequest message to the server. The response of this message is
* either a {@link MergedBlockMetaSuccess} or {@link RpcFailure}.
*
* @param appId applicationId.
* @param shuffleId shuffle id.
* @param shuffleMergeId shuffleMergeId is used to uniquely identify merging process
* of shuffle by an indeterminate stage attempt.
* @param reduceId reduce id.
* @param callback callback the handle the reply.
*/
public void sendMergedBlockMetaReq(
String appId,
int shuffleId,
int shuffleMergeId,
int reduceId,
MergedBlockMetaResponseCallback callback) {
long requestId = requestId();
if (logger.isTraceEnabled()) {
logger.trace(
"Sending RPC {} to fetch merged block meta to {}", requestId, getRemoteAddress(channel));
}
handler.addRpcRequest(requestId, callback);
RpcChannelListener listener = new RpcChannelListener(requestId, callback);
channel.writeAndFlush(
new MergedBlockMetaRequest(requestId, appId, shuffleId, shuffleMergeId,
reduceId)).addListener(listener);
}
/**
* Send data to the remote end as a stream. This differs from stream() in that this is a request
* to *send* data to the remote end, not to receive it from the remote.
*
* @param meta meta data associated with the stream, which will be read completely on the
* receiving end before the stream itself.
* @param data this will be streamed to the remote end to allow for transferring large amounts
* of data without reading into memory.
* @param callback handles the reply -- onSuccess will only be called when both message and data
* are received successfully.
*/
public long uploadStream(
ManagedBuffer meta,
ManagedBuffer data,
RpcResponseCallback callback) {
if (logger.isTraceEnabled()) {
logger.trace("Sending RPC to {}", getRemoteAddress(channel));
}
long requestId = requestId();
handler.addRpcRequest(requestId, callback);
RpcChannelListener listener = new RpcChannelListener(requestId, callback);
channel.writeAndFlush(new UploadStream(requestId, meta, data)).addListener(listener);
return requestId;
}
/**
* Synchronously sends an opaque message to the RpcHandler on the server-side, waiting for up to
* a specified timeout for a response.
*/
public ByteBuffer sendRpcSync(ByteBuffer message, long timeoutMs) {
final SettableFuture<ByteBuffer> result = SettableFuture.create();
sendRpc(message, new RpcResponseCallback() {
@Override
public void onSuccess(ByteBuffer response) {
try {
ByteBuffer copy = ByteBuffer.allocate(response.remaining());
copy.put(response);
// flip "copy" to make it readable
copy.flip();
result.set(copy);
} catch (Throwable t) {
logger.warn("Error in responding RPC callback", t);
result.setException(t);
}
}
@Override
public void onFailure(Throwable e) {
result.setException(e);
}
});
try {
return result.get(timeoutMs, TimeUnit.MILLISECONDS);
} catch (ExecutionException e) {
throw new RuntimeException(e.getCause());
} catch (Exception e) {
if (e instanceof RuntimeException re) throw re;
throw new RuntimeException(e);
}
}
/**
* Sends an opaque message to the RpcHandler on the server-side. No reply is expected for the
* message, and no delivery guarantees are made.
*
* @param message The message to send.
*/
public void send(ByteBuffer message) {
channel.writeAndFlush(new OneWayMessage(new NioManagedBuffer(message)));
}
/**
* Removes any state associated with the given RPC.
*
* @param requestId The RPC id returned by {@link #sendRpc(ByteBuffer, RpcResponseCallback)}.
*/
public void removeRpcRequest(long requestId) {
handler.removeRpcRequest(requestId);
}
/** Mark this channel as having timed out. */
public void timeOut() {
this.timedOut = true;
}
@VisibleForTesting
public TransportResponseHandler getHandler() {
return handler;
}
@Override
public void close() {
// Mark the connection as timed out, so we do not return a connection that's being closed
// from the TransportClientFactory if closing takes some time (e.g. with SSL)
this.timedOut = true;
// close should not take this long; use a timeout just to be safe
channel.close().awaitUninterruptibly(10, TimeUnit.SECONDS);
}
@Override
public String toString() {
return "TransportClient[remoteAddress=" + channel.remoteAddress() + "clientId=" + clientId +
",isActive=" + isActive() + "]";
}
private static long requestId() {
return Math.abs(UUID.randomUUID().getLeastSignificantBits());
}
private | TransportClient |
java | apache__camel | components/camel-mock/src/main/java/org/apache/camel/component/mock/TimeClause.java | {
"start": 1785,
"end": 7140
} | class ____ {
private final TimeClause clause;
private final int from;
private final int to;
public TimeClassUnit(TimeClause clause, int to) {
this(clause, -1, to);
}
public TimeClassUnit(TimeClause clause, int from, int to) {
this.clause = clause;
this.from = from;
this.to = to;
}
public TimeClause millis() {
period(TimeUnit.MILLISECONDS);
return clause;
}
public TimeClause seconds() {
period(TimeUnit.SECONDS);
return clause;
}
public TimeClause minutes() {
period(TimeUnit.MINUTES);
return clause;
}
private void period(TimeUnit unit) {
if (from > 0) {
timeFrom = new Time(from, unit);
}
timeTo = new Time(to, unit);
}
}
// DSL
// -------------------------------------------------------------------------
public TimeClassUnit noLaterThan(int period) {
return new TimeClassUnit(this, period);
}
public TimeClassUnit between(int from, int to) {
return new TimeClassUnit(this, from, to);
}
public void beforeNext() {
this.beforeNext = true;
}
public void afterPrevious() {
this.beforeNext = false;
}
// Implementation
// -------------------------------------------------------------------------
@Override
public Expression getLeft() {
return left;
}
@Override
public Expression getRight() {
return right;
}
@Override
public boolean matches(Exchange exchange) {
return matchesReturningFailureMessage(exchange) == null;
}
@Override
public String matchesReturningFailureMessage(Exchange exchange) {
// we must not store any state, so we can be thread-safe,
// and thus we offer this method which returns a failure message if
// we did not match
String answer = null;
// must be thread safe and store result in local objects
Object leftValue = left.evaluate(exchange, Object.class);
Object rightValue = right.evaluate(exchange, Object.class);
if (!matches(exchange, leftValue, rightValue)) {
answer = leftValue + " " + getOperator() + " " + rightValue;
}
return answer;
}
protected boolean matches(Exchange exchange, Object leftValue, Object rightValue) {
was = null;
boolean answer = true;
if (timeTo == null) {
throw new IllegalArgumentException("The time period has not been set. Ensure to include the time unit as well.");
}
Date currentDate = exchange.getProperty(Exchange.RECEIVED_TIMESTAMP, Date.class);
final Date otherDate = getOtherDate(leftValue, rightValue);
// if we could not grab the value, we hit a boundary (i.e., either 0 message or last message)
if (otherDate == null) {
return true;
}
// compute if we were within the allowed time range
Time current = new Time(currentDate.getTime(), TimeUnit.MILLISECONDS);
Time other = new Time(otherDate.getTime(), TimeUnit.MILLISECONDS);
// must absolute delta as when we hit the boundaries, the delta would negative
long delta = Math.abs(other.toMillis() - current.toMillis());
was = "delta: " + delta + " millis";
if (timeFrom != null) {
long from = timeFrom.toMillis();
answer = delta >= from;
}
if (answer) {
long to = timeTo.toMillis();
answer = delta <= to;
}
if (LOG.isDebugEnabled()) {
LOG.debug("Evaluated time clause [{}] with current: {}, other: {} -> {}", this, currentDate, otherDate,
answer);
}
return answer;
}
private Date getOtherDate(Object leftValue, Object rightValue) {
// the other date is either the previous or the next
Date otherDate;
if (beforeNext) {
// grab the previous value (left)
if (leftValue != null) {
otherDate = (Date) leftValue;
} else {
// we hit a boundary so grab the other
otherDate = (Date) rightValue;
}
} else {
// grab the next value (right)
if (rightValue != null) {
otherDate = (Date) rightValue;
} else {
// we hit a boundary so grab the other
otherDate = (Date) leftValue;
}
}
return otherDate;
}
@Override
public String getOperator() {
return getOperationText();
}
protected String getOperationText() {
return beforeNext ? "before next" : "after previous";
}
@Override
public String toString() {
if (timeFrom == null) {
return "no later than " + timeTo + " " + getOperationText() + " (" + was + ")";
} else {
return "between " + timeFrom.getNumber() + "-" + timeTo.getNumber() + " "
+ timeTo.getTimeUnit().toString().toLowerCase(Locale.ENGLISH)
+ " " + getOperationText() + " (" + was + ")";
}
}
}
| TimeClassUnit |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/SpelCompilationCoverageTests.java | {
"start": 266384,
"end": 266670
} | class ____ {
public Map<String,Long> record = new HashMap<>();
public LongHolder expression = new LongHolder();
public void add(String key, Long value) {
record.put(key, value);
}
public long get(String key) {
return record.get(key);
}
}
public static | RecordHolder |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/logging/Log.java | {
"start": 271,
"end": 408
} | class ____, during build time, replaced by invocations
* of the same methods on a generated instance of {@link Logger}.
*/
public final | are |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/PrimitiveArrayBuilder.java | {
"start": 4137,
"end": 4393
} | class ____ only deals with opaque "untyped" chunks.
* This works because {@link java.lang.System#arraycopy} does not
* take type; hence we can implement some aspects of primitive data
* handling in generic fashion.
*/
final static | which |
java | apache__kafka | test-common/test-common-util/src/main/java/org/apache/kafka/common/test/api/Flaky.java | {
"start": 1146,
"end": 1349
} | interface ____ {
/**
* Required reference to a KAFKA Jira ticket.
*/
String value();
/**
* Optional comment describing the reason.
*/
String comment() default "";
}
| Flaky |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/SlashPathRestClientTest.java | {
"start": 3885,
"end": 4080
} | interface ____ {
@GET
@Produces(MediaType.TEXT_PLAIN)
String echo();
}
@Path("/slash")
@Produces(MediaType.TEXT_PLAIN)
public static | HelloClientUsingClassName |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/AuthorizeHttpRequestsConfigurerTests.java | {
"start": 64501,
"end": 64697
} | class ____ {
int invocations;
@EventListener
void onRequestDenied(AuthorizationDeniedEvent<? extends HttpServletRequest> denied) {
this.invocations++;
}
}
}
| AuthorizationDeniedListener |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java | {
"start": 4821,
"end": 5888
} | class ____ {
@VisibleForTesting
static final Logger LOG = LoggerFactory.getLogger(
UserGroupInformation.class);
/**
* Percentage of the ticket window to use before we renew ticket.
*/
private static final float TICKET_RENEW_WINDOW = 0.80f;
private static boolean shouldRenewImmediatelyForTests = false;
static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
/**
* For the purposes of unit tests, we want to test login
* from keytab and don't want to wait until the renew
* window (controlled by TICKET_RENEW_WINDOW).
* @param immediate true if we should login without waiting for ticket window
*/
@VisibleForTesting
public static void setShouldRenewImmediatelyForTests(boolean immediate) {
shouldRenewImmediatelyForTests = immediate;
}
/**
* UgiMetrics maintains UGI activity statistics
* and publishes them through the metrics interfaces.
*/
@Metrics(about="User and group related metrics", context="ugi")
static | UserGroupInformation |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/message/ExtendedThreadInfoFactory.java | {
"start": 1436,
"end": 2400
} | class ____ implements ThreadInfoFactory {
public ExtendedThreadInfoFactory() {
final Method[] methods = ThreadInfo.class.getMethods();
boolean basic = true;
for (final Method method : methods) {
if (method.getName().equals("getLockInfo")) {
basic = false;
break;
}
}
if (basic) {
throw new IllegalStateException();
}
}
@Override
public Map<ThreadInformation, StackTraceElement[]> createThreadInfo() {
final ThreadMXBean bean = ManagementFactory.getThreadMXBean();
final ThreadInfo[] array = bean.dumpAllThreads(true, true);
final Map<ThreadInformation, StackTraceElement[]> threads = new HashMap<>(array.length);
for (final ThreadInfo info : array) {
threads.put(new ExtendedThreadInformation(info), info.getStackTrace());
}
return threads;
}
}
| ExtendedThreadInfoFactory |
java | apache__rocketmq | openmessaging/src/main/java/io/openmessaging/rocketmq/consumer/PullConsumerImpl.java | {
"start": 2054,
"end": 7585
} | class ____ implements PullConsumer {
private static final Logger log = LoggerFactory.getLogger(PullConsumerImpl.class);
private final DefaultMQPullConsumer rocketmqPullConsumer;
private final KeyValue properties;
private boolean started = false;
private final MQPullConsumerScheduleService pullConsumerScheduleService;
private final LocalMessageCache localMessageCache;
private final ClientConfig clientConfig;
public PullConsumerImpl(final KeyValue properties) {
this.properties = properties;
this.clientConfig = BeanUtils.populate(properties, ClientConfig.class);
String consumerGroup = clientConfig.getConsumerId();
if (null == consumerGroup || consumerGroup.isEmpty()) {
throw new OMSRuntimeException("-1", "Consumer Group is necessary for RocketMQ, please set it.");
}
pullConsumerScheduleService = new MQPullConsumerScheduleService(consumerGroup);
this.rocketmqPullConsumer = pullConsumerScheduleService.getDefaultMQPullConsumer();
if ("true".equalsIgnoreCase(System.getenv("OMS_RMQ_DIRECT_NAME_SRV"))) {
String accessPoints = clientConfig.getAccessPoints();
if (accessPoints == null || accessPoints.isEmpty()) {
throw new OMSRuntimeException("-1", "OMS AccessPoints is null or empty.");
}
this.rocketmqPullConsumer.setNamesrvAddr(accessPoints.replace(',', ';'));
}
this.rocketmqPullConsumer.setConsumerGroup(consumerGroup);
int maxReDeliveryTimes = clientConfig.getRmqMaxRedeliveryTimes();
this.rocketmqPullConsumer.setMaxReconsumeTimes(maxReDeliveryTimes);
String consumerId = OMSUtil.buildInstanceName();
this.rocketmqPullConsumer.setInstanceName(consumerId);
properties.put(OMSBuiltinKeys.CONSUMER_ID, consumerId);
this.rocketmqPullConsumer.setLanguage(LanguageCode.OMS);
this.localMessageCache = new LocalMessageCache(this.rocketmqPullConsumer, clientConfig);
}
@Override
public KeyValue attributes() {
return properties;
}
@Override
public PullConsumer attachQueue(String queueName) {
registerPullTaskCallback(queueName);
return this;
}
@Override
public PullConsumer attachQueue(String queueName, KeyValue attributes) {
registerPullTaskCallback(queueName);
return this;
}
@Override
public PullConsumer detachQueue(String queueName) {
this.rocketmqPullConsumer.getRegisterTopics().remove(queueName);
return this;
}
@Override
public Message receive() {
MessageExt rmqMsg = localMessageCache.poll();
return rmqMsg == null ? null : OMSUtil.msgConvert(rmqMsg);
}
@Override
public Message receive(final KeyValue properties) {
MessageExt rmqMsg = localMessageCache.poll(properties);
return rmqMsg == null ? null : OMSUtil.msgConvert(rmqMsg);
}
@Override
public void ack(final String messageId) {
localMessageCache.ack(messageId);
}
@Override
public void ack(final String messageId, final KeyValue properties) {
localMessageCache.ack(messageId);
}
@Override
public synchronized void startup() {
if (!started) {
try {
this.pullConsumerScheduleService.start();
this.localMessageCache.startup();
} catch (MQClientException e) {
throw new OMSRuntimeException("-1", e);
}
}
this.started = true;
}
private void registerPullTaskCallback(final String targetQueueName) {
this.pullConsumerScheduleService.registerPullTaskCallback(targetQueueName, new PullTaskCallback() {
@Override
public void doPullTask(final MessageQueue mq, final PullTaskContext context) {
MQPullConsumer consumer = context.getPullConsumer();
try {
long offset = localMessageCache.nextPullOffset(mq);
PullResult pullResult = consumer.pull(mq, "*",
offset, localMessageCache.nextPullBatchNums());
ProcessQueue pq = rocketmqPullConsumer.getDefaultMQPullConsumerImpl().getRebalanceImpl()
.getProcessQueueTable().get(mq);
switch (pullResult.getPullStatus()) {
case FOUND:
if (pq != null) {
pq.putMessage(pullResult.getMsgFoundList());
for (final MessageExt messageExt : pullResult.getMsgFoundList()) {
localMessageCache.submitConsumeRequest(new ConsumeRequest(messageExt, mq, pq));
}
}
break;
default:
break;
}
localMessageCache.updatePullOffset(mq, pullResult.getNextBeginOffset());
} catch (Exception e) {
log.error("An error occurred in pull message process.", e);
}
}
});
}
@Override
public synchronized void shutdown() {
if (this.started) {
this.localMessageCache.shutdown();
this.pullConsumerScheduleService.shutdown();
this.rocketmqPullConsumer.shutdown();
}
this.started = false;
}
}
| PullConsumerImpl |
java | spring-projects__spring-boot | module/spring-boot-pulsar/src/test/java/org/springframework/boot/pulsar/autoconfigure/PulsarAutoConfigurationTests.java | {
"start": 24132,
"end": 28892
} | class ____ {
private final ApplicationContextRunner contextRunner = PulsarAutoConfigurationTests.this.contextRunner;
@Test
@SuppressWarnings("unchecked")
void whenHasUserDefinedBeanDoesNotAutoConfigureBean() {
PulsarProducerFactory<String> producerFactory = mock(PulsarProducerFactory.class);
this.contextRunner
.withBean("customPulsarProducerFactory", PulsarProducerFactory.class, () -> producerFactory)
.run((context) -> assertThat(context).getBean(PulsarProducerFactory.class).isSameAs(producerFactory));
}
@Test
void whenNoPropertiesUsesCachingPulsarProducerFactory() {
this.contextRunner.run((context) -> assertThat(context).getBean(PulsarProducerFactory.class)
.isExactlyInstanceOf(CachingPulsarProducerFactory.class));
}
@Test
void whenCachingDisabledUsesDefaultPulsarProducerFactory() {
this.contextRunner.withPropertyValues("spring.pulsar.producer.cache.enabled=false")
.run((context) -> assertThat(context).getBean(PulsarProducerFactory.class)
.isExactlyInstanceOf(DefaultPulsarProducerFactory.class));
}
@Test
void whenCachingEnabledUsesCachingPulsarProducerFactory() {
this.contextRunner.withPropertyValues("spring.pulsar.producer.cache.enabled=true")
.run((context) -> assertThat(context).getBean(PulsarProducerFactory.class)
.isExactlyInstanceOf(CachingPulsarProducerFactory.class));
}
@Test
void whenCachingEnabledAndCaffeineNotOnClasspathStillUsesCaffeine() {
this.contextRunner.withClassLoader(new FilteredClassLoader(Caffeine.class))
.withPropertyValues("spring.pulsar.producer.cache.enabled=true")
.run((context) -> {
assertThat(context).getBean(CachingPulsarProducerFactory.class)
.extracting("producerCache")
.extracting(Object::getClass)
.isEqualTo(CaffeineCacheProvider.class);
assertThat(context).getBean(CachingPulsarProducerFactory.class)
.extracting("producerCache.cache")
.extracting(Object::getClass)
.extracting(Class::getName)
.asString()
.startsWith("org.springframework.pulsar.shade.com.github.benmanes.caffeine.cache.");
});
}
@Test
void whenCustomCachingPropertiesCreatesConfiguredBean() {
this.contextRunner
.withPropertyValues("spring.pulsar.producer.cache.expire-after-access=100s",
"spring.pulsar.producer.cache.maximum-size=5150",
"spring.pulsar.producer.cache.initial-capacity=200")
.run((context) -> assertThat(context).getBean(CachingPulsarProducerFactory.class)
.extracting("producerCache.cache.cache")
.hasFieldOrPropertyWithValue("maximum", 5150L)
.hasFieldOrPropertyWithValue("expiresAfterAccessNanos", TimeUnit.SECONDS.toNanos(100)));
}
@Test
void whenHasTopicNamePropertyCreatesConfiguredBean() {
this.contextRunner.withPropertyValues("spring.pulsar.producer.topic-name=my-topic")
.run((context) -> assertThat(context).getBean(DefaultPulsarProducerFactory.class)
.hasFieldOrPropertyWithValue("defaultTopic", "my-topic"));
}
@Test
void injectsExpectedBeans() {
this.contextRunner
.withPropertyValues("spring.pulsar.producer.topic-name=my-topic",
"spring.pulsar.producer.cache.enabled=false")
.run((context) -> assertThat(context).getBean(DefaultPulsarProducerFactory.class)
.hasFieldOrPropertyWithValue("pulsarClient", context.getBean(PulsarClient.class))
.hasFieldOrPropertyWithValue("topicResolver", context.getBean(TopicResolver.class))
.extracting("topicBuilder")
.isNotNull());
}
@Test
void hasNoTopicBuilderWhenTopicDefaultsAreDisabled() {
this.contextRunner.withPropertyValues("spring.pulsar.defaults.topic.enabled=false")
.run((context) -> assertThat(context).getBean(DefaultPulsarProducerFactory.class)
.extracting("topicBuilder")
.isNull());
}
@ParameterizedTest
@ValueSource(booleans = { true, false })
<T> void whenHasUserDefinedCustomizersAppliesInCorrectOrder(boolean cachingEnabled) {
this.contextRunner
.withPropertyValues("spring.pulsar.producer.cache.enabled=" + cachingEnabled,
"spring.pulsar.producer.name=fromPropsCustomizer")
.withUserConfiguration(ProducerBuilderCustomizersConfig.class)
.run((context) -> {
DefaultPulsarProducerFactory<?> producerFactory = context
.getBean(DefaultPulsarProducerFactory.class);
Customizers<ProducerBuilderCustomizer<T>, ProducerBuilder<T>> customizers = Customizers
.of(ProducerBuilder.class, ProducerBuilderCustomizer::customize);
assertThat(customizers.fromField(producerFactory, "defaultConfigCustomizers")).callsInOrder(
ProducerBuilder::producerName, "fromPropsCustomizer", "fromCustomizer1", "fromCustomizer2");
});
}
@TestConfiguration(proxyBeanMethods = false)
static | ProducerFactoryTests |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotatedElementUtilsTests.java | {
"start": 62327,
"end": 62399
} | class ____ {
}
@Resource(name = "x")
@RegEx
static | SpringAppConfigClass |
java | apache__flink | flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/header/util/GetInfoHeaders.java | {
"start": 1359,
"end": 2621
} | class ____
implements SqlGatewayMessageHeaders<
EmptyRequestBody, GetInfoResponseBody, EmptyMessageParameters> {
private static final String URL = "/info";
private static final GetInfoHeaders INSTANCE = new GetInfoHeaders();
private GetInfoHeaders() {}
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.GET;
}
@Override
public String getTargetRestEndpointURL() {
return URL;
}
@Override
public Class<GetInfoResponseBody> getResponseClass() {
return GetInfoResponseBody.class;
}
@Override
public HttpResponseStatus getResponseStatusCode() {
return HttpResponseStatus.OK;
}
@Override
public String getDescription() {
return "Get meta data for this cluster.";
}
@Override
public Class<EmptyRequestBody> getRequestClass() {
return EmptyRequestBody.class;
}
@Override
public EmptyMessageParameters getUnresolvedMessageParameters() {
return EmptyMessageParameters.getInstance();
}
public static GetInfoHeaders getInstance() {
return INSTANCE;
}
@Override
public String operationId() {
return "getInfo";
}
}
| GetInfoHeaders |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/source/presencecheck/spi/SoccerTeamTarget.java | {
"start": 297,
"end": 835
} | class ____ {
private List<String> players;
private String goalKeeperName;
public List<String> getPlayers() {
return players;
}
public void addPlayer(String player) {
if ( this.players == null ) {
this.players = new ArrayList<>();
}
this.players.add( player );
}
public String getGoalKeeperName() {
return goalKeeperName;
}
public void setGoalKeeperName(String goalKeeperName) {
this.goalKeeperName = goalKeeperName;
}
}
| SoccerTeamTarget |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/NXYSignificanceHeuristic.java | {
"start": 5232,
"end": 5319
} | class
____.N_0 = supersetSize;
// documents that are in | frequencies |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/DeleteUploadedFilesOnEndTest.java | {
"start": 1526,
"end": 3104
} | class ____ {
private static final String UPLOADS_DIR = "target/delete-uploaded-files-on-end-" + UUID.randomUUID().toString();
private static final byte[] CAFEBABE_BYTES = new byte[] { 0xc, 0xa, 0xf, 0xe, 0xb, 0xa, 0xb, 0xe };
@RegisterExtension
static final QuarkusUnitTest CONFIG = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class).addClasses(Routes.class)
.addAsResource(new StringAsset("quarkus.http.body.delete-uploaded-files-on-end = true\n" //
+ "quarkus.http.body.handle-file-uploads = true\n" //
+ "quarkus.http.body.uploads-directory = " + UPLOADS_DIR + "\n"), "application.properties"));
@Test
public void upload() throws IOException {
final String cafeBabe = "cafe babe";
final String uploadedPath = RestAssured.given().contentType("multipart/form-data")
.multiPart("file", "bytes.bin", CAFEBABE_BYTES).formParam("description", cafeBabe)
.formParam("echoAttachment", "bytes.bin").post("/vertx-web/upload") //
.then().statusCode(200).extract().body().asString();
Assertions.assertFalse(uploadedPath.trim().isEmpty());
/* Wait up to 5 seconds for the file to disappear */
final long deadline = System.currentTimeMillis() + 5000;
while (Files.exists(Paths.get(uploadedPath)) && System.currentTimeMillis() < deadline) {
}
Assertions.assertFalse(Files.exists(Paths.get(uploadedPath)));
}
public static | DeleteUploadedFilesOnEndTest |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/annotation/AttributeMethods.java | {
"start": 1243,
"end": 10137
} | class ____ {
static final AttributeMethods NONE = new AttributeMethods(null, new Method[0]);
static final Map<Class<? extends Annotation>, AttributeMethods> cache = new ConcurrentReferenceHashMap<>();
private static final Comparator<Method> methodComparator = (m1, m2) -> {
if (m1 != null && m2 != null) {
return m1.getName().compareTo(m2.getName());
}
return (m1 != null ? -1 : 1);
};
private final @Nullable Class<? extends Annotation> annotationType;
private final Method[] attributeMethods;
private final boolean[] canThrowTypeNotPresentException;
private final boolean hasDefaultValueMethod;
private final boolean hasNestedAnnotation;
private AttributeMethods(@Nullable Class<? extends Annotation> annotationType, Method[] attributeMethods) {
this.annotationType = annotationType;
this.attributeMethods = attributeMethods;
this.canThrowTypeNotPresentException = new boolean[attributeMethods.length];
boolean foundDefaultValueMethod = false;
boolean foundNestedAnnotation = false;
for (int i = 0; i < attributeMethods.length; i++) {
Method method = this.attributeMethods[i];
Class<?> type = method.getReturnType();
if (!foundDefaultValueMethod && (method.getDefaultValue() != null)) {
foundDefaultValueMethod = true;
}
if (!foundNestedAnnotation && (type.isAnnotation() || (type.isArray() && type.componentType().isAnnotation()))) {
foundNestedAnnotation = true;
}
ReflectionUtils.makeAccessible(method);
this.canThrowTypeNotPresentException[i] = (type == Class.class || type == Class[].class || type.isEnum());
}
this.hasDefaultValueMethod = foundDefaultValueMethod;
this.hasNestedAnnotation = foundNestedAnnotation;
}
/**
* Determine if values from the given annotation can be safely accessed without
* causing any {@link TypeNotPresentException TypeNotPresentExceptions}.
* <p>This method is designed to cover Google App Engine's late arrival of such
* exceptions for {@code Class} values (instead of the more typical early
* {@code Class.getAnnotations() failure} on a regular JVM).
* @param annotation the annotation to check
* @return {@code true} if all values are present
* @see #validate(Annotation)
*/
boolean canLoad(Annotation annotation) {
assertAnnotation(annotation);
for (int i = 0; i < size(); i++) {
if (canThrowTypeNotPresentException(i)) {
try {
AnnotationUtils.invokeAnnotationMethod(get(i), annotation);
}
catch (IllegalStateException ex) {
// Plain invocation failure to expose -> leave up to attribute retrieval
// (if any) where such invocation failure will be logged eventually.
}
catch (Throwable ex) {
// TypeNotPresentException etc. -> annotation type not actually loadable.
return false;
}
}
}
return true;
}
/**
* Check if values from the given annotation can be safely accessed without causing
* any {@link TypeNotPresentException TypeNotPresentExceptions}.
* <p>This method is designed to cover Google App Engine's late arrival of such
* exceptions for {@code Class} values (instead of the more typical early
* {@code Class.getAnnotations() failure} on a regular JVM).
* @param annotation the annotation to validate
* @throws IllegalStateException if a declared {@code Class} attribute could not be read
* @see #canLoad(Annotation)
*/
void validate(Annotation annotation) {
assertAnnotation(annotation);
for (int i = 0; i < size(); i++) {
if (canThrowTypeNotPresentException(i)) {
try {
AnnotationUtils.invokeAnnotationMethod(get(i), annotation);
}
catch (IllegalStateException ex) {
throw ex;
}
catch (Throwable ex) {
throw new IllegalStateException("Could not obtain annotation attribute value for " +
get(i).getName() + " declared on @" + getName(annotation.annotationType()), ex);
}
}
}
}
private void assertAnnotation(Annotation annotation) {
Assert.notNull(annotation, "Annotation must not be null");
if (this.annotationType != null) {
Assert.isInstanceOf(this.annotationType, annotation);
}
}
/**
* Get the attribute with the specified name or {@code null} if no
* matching attribute exists.
* @param name the attribute name to find
* @return the attribute method or {@code null}
*/
@Nullable Method get(String name) {
int index = indexOf(name);
return (index != -1 ? this.attributeMethods[index] : null);
}
/**
* Get the attribute at the specified index.
* @param index the index of the attribute to return
* @return the attribute method
* @throws IndexOutOfBoundsException if the index is out of range
* ({@code index < 0 || index >= size()})
*/
Method get(int index) {
return this.attributeMethods[index];
}
/**
* Determine if the attribute at the specified index could throw a
* {@link TypeNotPresentException} when accessed.
* @param index the index of the attribute to check
* @return {@code true} if the attribute can throw a
* {@link TypeNotPresentException}
*/
boolean canThrowTypeNotPresentException(int index) {
return this.canThrowTypeNotPresentException[index];
}
/**
* Get the index of the attribute with the specified name, or {@code -1}
* if there is no attribute with the name.
* @param name the name to find
* @return the index of the attribute, or {@code -1}
*/
int indexOf(String name) {
for (int i = 0; i < this.attributeMethods.length; i++) {
if (this.attributeMethods[i].getName().equals(name)) {
return i;
}
}
return -1;
}
/**
* Get the index of the specified attribute, or {@code -1} if the
* attribute is not in this collection.
* @param attribute the attribute to find
* @return the index of the attribute, or {@code -1}
*/
int indexOf(Method attribute) {
for (int i = 0; i < this.attributeMethods.length; i++) {
if (this.attributeMethods[i].equals(attribute)) {
return i;
}
}
return -1;
}
/**
* Get the number of attributes in this collection.
* @return the number of attributes
*/
int size() {
return this.attributeMethods.length;
}
/**
* Determine if at least one of the attribute methods has a default value.
* @return {@code true} if there is at least one attribute method with a default value
*/
boolean hasDefaultValueMethod() {
return this.hasDefaultValueMethod;
}
/**
* Determine if at least one of the attribute methods is a nested annotation.
* @return {@code true} if there is at least one attribute method with a nested
* annotation type
*/
boolean hasNestedAnnotation() {
return this.hasNestedAnnotation;
}
/**
* Get the attribute methods for the given annotation type.
* @param annotationType the annotation type
* @return the attribute methods for the annotation type
*/
static AttributeMethods forAnnotationType(@Nullable Class<? extends Annotation> annotationType) {
if (annotationType == null) {
return NONE;
}
return cache.computeIfAbsent(annotationType, AttributeMethods::compute);
}
@SuppressWarnings("NullAway") // Dataflow analysis limitation
private static AttributeMethods compute(Class<? extends Annotation> annotationType) {
Method[] methods = annotationType.getDeclaredMethods();
int size = methods.length;
for (int i = 0; i < methods.length; i++) {
if (!isAttributeMethod(methods[i])) {
//noinspection DataFlowIssue
methods[i] = null;
size--;
}
}
if (size == 0) {
return NONE;
}
Arrays.sort(methods, methodComparator);
Method[] attributeMethods = Arrays.copyOf(methods, size);
return new AttributeMethods(annotationType, attributeMethods);
}
private static boolean isAttributeMethod(Method method) {
return (method.getParameterCount() == 0 && method.getReturnType() != void.class);
}
/**
* Create a description for the given attribute method suitable to use in
* exception messages and logs.
* @param attribute the attribute to describe
* @return a description of the attribute
*/
static String describe(@Nullable Method attribute) {
if (attribute == null) {
return "(none)";
}
return describe(attribute.getDeclaringClass(), attribute.getName());
}
/**
* Create a description for the given attribute method suitable to use in
* exception messages and logs.
* @param annotationType the annotation type
* @param attributeName the attribute name
* @return a description of the attribute
*/
static String describe(@Nullable Class<?> annotationType, @Nullable String attributeName) {
if (attributeName == null) {
return "(none)";
}
String in = (annotationType != null ? " in annotation [" + annotationType.getName() + "]" : "");
return "attribute '" + attributeName + "'" + in;
}
private static String getName(Class<?> clazz) {
String canonicalName = clazz.getCanonicalName();
return (canonicalName != null ? canonicalName : clazz.getName());
}
}
| AttributeMethods |
java | elastic__elasticsearch | x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java | {
"start": 2048,
"end": 7383
} | class ____ extends TransportTasksAction<
RollupJobTask,
GetRollupJobsAction.Request,
GetRollupJobsAction.Response,
GetRollupJobsAction.Response> {
private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportGetRollupCapsAction.class);
private final ProjectResolver projectResolver;
@Inject
public TransportGetRollupJobAction(
TransportService transportService,
ActionFilters actionFilters,
ClusterService clusterService,
ProjectResolver projectResolver
) {
super(
GetRollupJobsAction.NAME,
clusterService,
transportService,
actionFilters,
GetRollupJobsAction.Request::new,
GetRollupJobsAction.Response::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.projectResolver = projectResolver;
}
@Override
protected void doExecute(Task task, GetRollupJobsAction.Request request, ActionListener<GetRollupJobsAction.Response> listener) {
DEPRECATION_LOGGER.warn(DeprecationCategory.API, DEPRECATION_KEY, DEPRECATION_MESSAGE);
final ClusterState state = clusterService.state();
final DiscoveryNodes nodes = state.nodes();
if (nodes.isLocalNodeElectedMaster()) {
final ProjectMetadata project = projectResolver.getProjectMetadata(state);
if (stateHasRollupJobs(request, project)) {
super.doExecute(task, request, listener);
} else {
// If we couldn't find the job in the persistent task CS, it means it was deleted prior to this GET
// and we can just send an empty response, no need to go looking for the allocated task
listener.onResponse(new GetRollupJobsAction.Response(Collections.emptyList()));
}
} else {
// Delegates GetJobs to elected master node, so it becomes the coordinating node.
// Non-master nodes may have a stale cluster state that shows jobs which are cancelled
// on the master, which makes testing difficult.
if (nodes.getMasterNode() == null) {
listener.onFailure(new MasterNotDiscoveredException());
} else {
transportService.sendRequest(
nodes.getMasterNode(),
actionName,
request,
new ActionListenerResponseHandler<>(
listener,
GetRollupJobsAction.Response::new,
TransportResponseHandler.TRANSPORT_WORKER
)
);
}
}
}
/**
* Check to see if the PersistentTask's cluster state contains the rollup job(s) we are interested in
*/
static boolean stateHasRollupJobs(GetRollupJobsAction.Request request, ProjectMetadata project) {
boolean hasRollupJobs = false;
PersistentTasksCustomMetadata pTasksMeta = project.custom(PersistentTasksCustomMetadata.TYPE);
if (pTasksMeta != null) {
// If the request was for _all rollup jobs, we need to look through the list of
// persistent tasks and see if at least once has a RollupJob param
if (request.getId().equals(Metadata.ALL)) {
hasRollupJobs = pTasksMeta.tasks()
.stream()
.anyMatch(persistentTask -> persistentTask.getTaskName().equals(RollupField.TASK_NAME));
} else if (pTasksMeta.getTask(request.getId()) != null) {
// If we're looking for a single job, we can just check directly
hasRollupJobs = true;
}
}
return hasRollupJobs;
}
@Override
protected void taskOperation(
CancellableTask actionTask,
GetRollupJobsAction.Request request,
RollupJobTask jobTask,
ActionListener<GetRollupJobsAction.Response> listener
) {
List<GetRollupJobsAction.JobWrapper> jobs = Collections.emptyList();
assert jobTask.getConfig().getId().equals(request.getId()) || request.getId().equals(Metadata.ALL);
// Little extra insurance, make sure we only return jobs that aren't cancelled
if (jobTask.isCancelled() == false) {
GetRollupJobsAction.JobWrapper wrapper = new GetRollupJobsAction.JobWrapper(
jobTask.getConfig(),
jobTask.getStats(),
(RollupJobStatus) jobTask.getStatus()
);
jobs = Collections.singletonList(wrapper);
}
listener.onResponse(new GetRollupJobsAction.Response(jobs));
}
@Override
protected GetRollupJobsAction.Response newResponse(
GetRollupJobsAction.Request request,
List<GetRollupJobsAction.Response> tasks,
List<TaskOperationFailure> taskOperationFailures,
List<FailedNodeException> failedNodeExceptions
) {
List<GetRollupJobsAction.JobWrapper> jobs = tasks.stream()
.map(GetRollupJobsAction.Response::getJobs)
.flatMap(Collection::stream)
.collect(Collectors.toList());
return new GetRollupJobsAction.Response(jobs, taskOperationFailures, failedNodeExceptions);
}
}
| TransportGetRollupJobAction |
java | apache__camel | core/camel-cloud/src/main/java/org/apache/camel/impl/cloud/DefaultServiceLoadBalancerFactory.java | {
"start": 1223,
"end": 1581
} | class ____ implements ServiceLoadBalancerFactory {
@Override
public ServiceLoadBalancer newInstance(CamelContext camelContext) throws Exception {
DefaultServiceLoadBalancer loadBalancer = new DefaultServiceLoadBalancer();
loadBalancer.setCamelContext(camelContext);
return loadBalancer;
}
}
| DefaultServiceLoadBalancerFactory |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/log/DebugLogger.java | {
"start": 3142,
"end": 4013
} | class ____ extends DebugLogger {
private final String prefix;
SystemErrDebugLogger(Class<?> sourceClass) {
this.prefix = "LOADER: " + sourceClass + " : ";
}
@Override
public void log(String message) {
print(message);
}
@Override
public void log(String message, Object arg1) {
print(message.formatted(arg1));
}
@Override
public void log(String message, Object arg1, Object arg2) {
print(message.formatted(arg1, arg2));
}
@Override
public void log(String message, Object arg1, Object arg2, Object arg3) {
print(message.formatted(arg1, arg2, arg3));
}
@Override
public void log(String message, Object arg1, Object arg2, Object arg3, Object arg4) {
print(message.formatted(arg1, arg2, arg3, arg4));
}
private void print(String message) {
System.err.println(this.prefix + message);
}
}
}
| SystemErrDebugLogger |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/OptionalOfRedundantMethodTest.java | {
"start": 2265,
"end": 2631
} | class ____ {
void f() {
Optional.ofNullable("test").ifPresent(String::length);
}
}
""")
.doTest();
}
@Test
public void positive_orElse() {
compilationTestHelper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
| Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestNodesPage.java | {
"start": 1907,
"end": 7790
} | class ____ {
final int numberOfRacks = 2;
final int numberOfNodesPerRack = 8;
// The following is because of the way TestRMWebApp.mockRMContext creates
// nodes.
final int numberOfLostNodesPerRack = 1;
// Number of Actual Table Headers for NodesPage.NodesBlock might change in
// future. In that case this value should be adjusted to the new value.
private final int numberOfThInMetricsTable = 25;
private final int numberOfActualTableHeaders = 18;
private final int numberOfThForOpportunisticContainers = 4;
private Injector injector;
@BeforeEach
public void setUp() throws Exception {
setUpInternal(false);
}
private void setUpInternal(final boolean useDRC) throws Exception {
final RMContext mockRMContext =
TestRMWebApp.mockRMContext(3, numberOfRacks, numberOfNodesPerRack,
8 * TestRMWebApp.GiB);
injector =
WebAppTests.createMockInjector(RMContext.class, mockRMContext,
new Module() {
@Override
public void configure(Binder binder) {
try {
binder.bind(ResourceManager.class).toInstance(
TestRMWebApp.mockRm(mockRMContext, useDRC));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
});
}
@Test
public void testNodesBlockRender() throws Exception {
injector.getInstance(NodesBlock.class).render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer, times(numberOfActualTableHeaders + numberOfThInMetricsTable)).print("<th");
verify(writer, times(numberOfThInMetricsTable)).print("<td");
}
@Test
public void testNodesBlockRenderForLostNodes() {
NodesBlock nodesBlock = injector.getInstance(NodesBlock.class);
nodesBlock.set("node.state", "lost");
nodesBlock.render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer,
times(numberOfActualTableHeaders + numberOfThInMetricsTable))
.print("<th");
verify(writer, times(numberOfThInMetricsTable))
.print("<td");
}
@Test
public void testNodesBlockRenderForLostNodesWithGPUResources()
throws Exception {
Map<String, ResourceInformation> oldRtMap =
ResourceUtils.getResourceTypes();
CustomResourceTypesConfigurationProvider.
initResourceTypes(ResourceInformation.GPU_URI);
this.setUpInternal(true);
try {
// Test gpu as a custom resource.
//<th class="yarn io/gpu">
// yarn.io/gpu Used
//</th>
//<th class="yarn io/gpu">
// yarn.io/gpu Avail
//</th>
this.testNodesBlockRenderForLostNodesWithGPU();
} finally {
ResourceUtils.initializeResourcesFromResourceInformationMap(oldRtMap);
}
}
public void testNodesBlockRenderForLostNodesWithGPU() {
NodesBlock nodesBlock = injector.getInstance(NodesBlock.class);
nodesBlock.set("node.state", "lost");
nodesBlock.render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer,
times(numberOfActualTableHeaders
+ numberOfThInMetricsTable + 2))
.print("<th");
verify(writer, times(numberOfThInMetricsTable))
.print("<td");
}
@Test
public void testNodesBlockRenderForNodeLabelFilterWithNonEmptyLabel() {
NodesBlock nodesBlock = injector.getInstance(NodesBlock.class);
nodesBlock.set("node.label", "x");
nodesBlock.render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer, times(numberOfThInMetricsTable))
.print("<td");
verify(writer, times(1)).print("<script");
}
@Test
public void testNodesBlockRenderForNodeLabelFilterWithEmptyLabel() {
NodesBlock nodesBlock = injector.getInstance(NodesBlock.class);
nodesBlock.set("node.label", "");
nodesBlock.render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer, times(numberOfThInMetricsTable))
.print("<td");
}
@Test
public void testNodesBlockRenderForNodeLabelFilterWithAnyLabel() {
NodesBlock nodesBlock = injector.getInstance(NodesBlock.class);
nodesBlock.set("node.label", "*");
nodesBlock.render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer, times(numberOfThInMetricsTable))
.print("<td");
}
@Test
public void testNodesBlockRenderForOpportunisticContainers() {
final RMContext mockRMContext =
TestRMWebApp.mockRMContext(3, numberOfRacks, numberOfNodesPerRack,
8 * TestRMWebApp.GiB);
mockRMContext.getYarnConfiguration().setBoolean(
YarnConfiguration.OPPORTUNISTIC_CONTAINER_ALLOCATION_ENABLED, true);
injector =
WebAppTests.createMockInjector(RMContext.class, mockRMContext,
new Module() {
@Override
public void configure(Binder binder) {
try {
binder.bind(ResourceManager.class).toInstance(
TestRMWebApp.mockRm(mockRMContext));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
});
injector.getInstance(NodesBlock.class).render();
PrintWriter writer = injector.getInstance(PrintWriter.class);
WebAppTests.flushOutput(injector);
verify(writer, times(
numberOfActualTableHeaders + numberOfThInMetricsTable +
numberOfThForOpportunisticContainers)).print("<th");
verify(writer, times(numberOfThInMetricsTable))
.print("<td");
}
}
| TestNodesPage |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/model/BindingGraph.java | {
"start": 15809,
"end": 16272
} | interface ____ extends Node {
/** The component that owns the binding, or in which the binding is missing. */
@Override
ComponentPath componentPath();
/** The key of the binding, or for which there is no binding. */
Key key();
/** The binding, or empty if missing. */
Optional<Binding> binding();
}
/** A node in the binding graph that represents a missing binding for a key in a component. */
public abstract static | MaybeBinding |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/ClientAndServerSharingResponseTest.java | {
"start": 1033,
"end": 1526
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Endpoint.class, HeadersService.class));
@Test
public void test() {
when().get("/test/client")
.then()
.statusCode(200)
.body(containsString("{\"Accept\":\"application/json\"}"));
}
@RegisterRestClient
public | ClientAndServerSharingResponseTest |
java | resilience4j__resilience4j | resilience4j-spring/src/main/java/io/github/resilience4j/utils/AspectUtil.java | {
"start": 910,
"end": 1069
} | class ____ {
private AspectUtil() {
}
/**
* @param context the spring condition context
* @param classToCheck the | AspectUtil |
java | google__guice | core/src/com/google/inject/internal/Annotations.java | {
"start": 10782,
"end": 11683
} | interface ____ {
String value();
}
@TestAnnotation("determineAnnotationToStringConfig")
private static AnnotationToStringConfig determineAnnotationToStringConfig() {
try {
String annotation =
Annotations.class
.getDeclaredMethod("determineAnnotationToStringConfig")
.getAnnotation(TestAnnotation.class)
.toString();
boolean quote = annotation.contains("\"determineAnnotationToStringConfig\"");
boolean includeMemberName = annotation.contains("value=");
boolean dollarSeparator = annotation.contains("Annotations$TestAnnotation");
return new AnnotationToStringConfig(quote, includeMemberName, dollarSeparator);
} catch (NoSuchMethodException e) {
throw new AssertionError(e);
}
}
/** Checks for the presence of annotations. Caches results because Android doesn't. */
static | TestAnnotation |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFromIterable.java | {
"start": 4220,
"end": 7796
} | class ____<T> extends BaseRangeSubscription<T> {
private static final long serialVersionUID = -6022804456014692607L;
final Subscriber<? super T> downstream;
IteratorSubscription(Subscriber<? super T> actual, Iterator<? extends T> it) {
super(it);
this.downstream = actual;
}
@Override
void fastPath() {
Iterator<? extends T> it = this.iterator;
Subscriber<? super T> a = downstream;
for (;;) {
if (cancelled) {
return;
}
T t;
try {
t = it.next();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (cancelled) {
return;
}
if (t == null) {
a.onError(new NullPointerException("Iterator.next() returned a null value"));
return;
} else {
a.onNext(t);
}
if (cancelled) {
return;
}
boolean b;
try {
b = it.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (!b) {
if (!cancelled) {
a.onComplete();
}
return;
}
}
}
@Override
void slowPath(long r) {
long e = 0L;
Iterator<? extends T> it = this.iterator;
Subscriber<? super T> a = downstream;
for (;;) {
while (e != r) {
if (cancelled) {
return;
}
T t;
try {
t = it.next();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (cancelled) {
return;
}
if (t == null) {
a.onError(new NullPointerException("Iterator.next() returned a null value"));
return;
} else {
a.onNext(t);
}
if (cancelled) {
return;
}
boolean b;
try {
b = it.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (!b) {
if (!cancelled) {
a.onComplete();
}
return;
}
e++;
}
r = get();
if (e == r) {
r = addAndGet(-e);
if (r == 0L) {
return;
}
e = 0L;
}
}
}
}
static final | IteratorSubscription |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/EmptyAnnotationMetadata.java | {
"start": 1038,
"end": 16863
} | class ____ implements AnnotationMetadata {
@Override
public boolean hasPropertyExpressions() {
return false;
}
@Override
public <E extends Enum<E>> E[] enumValues(@NonNull String annotation, Class<E> enumType) {
return (E[]) Array.newInstance(enumType, 0);
}
@Override
public <E extends Enum<E>> E[] enumValues(@NonNull String annotation, @NonNull String member, Class<E> enumType) {
return (E[]) Array.newInstance(enumType, 0);
}
@Override
public <E extends Enum<E>> E[] enumValues(@NonNull Class<? extends Annotation> annotation, Class<E> enumType) {
return (E[]) Array.newInstance(enumType, 0);
}
@Override
public <E extends Enum<E>> E[] enumValues(@NonNull Class<? extends Annotation> annotation, @NonNull String member, Class<E> enumType) {
return (E[]) Array.newInstance(enumType, 0);
}
@NonNull
@Override
public List<String> getAnnotationNamesByStereotype(@Nullable String stereotype) {
return Collections.emptyList();
}
@NonNull
@Override
public Set<String> getAnnotationNames() {
return Collections.emptySet();
}
@NonNull
@Override
public Set<String> getDeclaredAnnotationNames() {
return Collections.emptySet();
}
@NonNull
@Override
public List<String> getDeclaredAnnotationNamesByStereotype(@Nullable String stereotype) {
return Collections.emptyList();
}
@NonNull
@Override
public <T> OptionalValues<T> getValues(@NonNull String annotation, @NonNull Class<T> valueType) {
//noinspection unchecked
return OptionalValues.EMPTY_VALUES;
}
@Override
public <T> Optional<T> getDefaultValue(@NonNull String annotation, @NonNull String member, @NonNull Argument<T> requiredType) {
return Optional.empty();
}
@NonNull
@Override
public <T extends Annotation> List<AnnotationValue<T>> getAnnotationValuesByType(@NonNull Class<T> annotationType) {
return Collections.emptyList();
}
@NonNull
@Override
public <T extends Annotation> List<AnnotationValue<T>> getDeclaredAnnotationValuesByType(@NonNull Class<T> annotationType) {
return Collections.emptyList();
}
@Override
public boolean hasDeclaredAnnotation(@Nullable String annotation) {
return false;
}
@Override
public boolean hasAnnotation(@Nullable String annotation) {
return false;
}
@Override
public boolean hasSimpleAnnotation(@Nullable String annotation) {
return false;
}
@Override
public boolean hasSimpleDeclaredAnnotation(@Nullable String annotation) {
return false;
}
@Override
public boolean hasStereotype(@Nullable String annotation) {
return false;
}
@Override
public boolean hasDeclaredStereotype(@Nullable String annotation) {
return false;
}
@NonNull
@Override
public Map<CharSequence, Object> getDefaultValues(@NonNull String annotation) {
return Collections.emptyMap();
}
@Override
public <T> Optional<T> getDefaultValue(@NonNull String annotation, @NonNull String member, @NonNull Class<T> requiredType) {
return Optional.empty();
}
@Override
public <T> Optional<T> getDefaultValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member, @NonNull Argument<T> requiredType) {
return Optional.empty();
}
@Override
public boolean isAnnotationPresent(@NonNull Class<? extends Annotation> annotationClass) {
return false;
}
@Override
public boolean isDeclaredAnnotationPresent(@NonNull Class<? extends Annotation> annotationClass) {
return false;
}
@Override
public <T> Optional<T> getDefaultValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member, @NonNull Class<T> requiredType) {
return Optional.empty();
}
@Override
public <T> Optional<T> getValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member, @NonNull Class<T> requiredType) {
return Optional.empty();
}
@Override
public <T> Optional<T> getValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member, @NonNull Argument<T> requiredType) {
return Optional.empty();
}
@Override
public Optional<String> getAnnotationNameByStereotype(@Nullable String stereotype) {
return Optional.empty();
}
@Override
public Optional<String> getDeclaredAnnotationNameByStereotype(@Nullable String stereotype) {
return Optional.empty();
}
@Override
public Optional<Class<? extends Annotation>> getAnnotationTypeByStereotype(@NonNull Class<? extends Annotation> stereotype) {
return Optional.empty();
}
@Override
public Optional<Class<? extends Annotation>> getDeclaredAnnotationTypeByStereotype(@NonNull Class<? extends Annotation> stereotype) {
return Optional.empty();
}
@Override
public Optional<Class<? extends Annotation>> getDeclaredAnnotationTypeByStereotype(@Nullable String stereotype) {
return Optional.empty();
}
@Override
public Optional<Class<? extends Annotation>> getAnnotationType(@NonNull String name) {
return Optional.empty();
}
@Override
public Optional<Class<? extends Annotation>> getAnnotationType(@NonNull String name, @NonNull ClassLoader classLoader) {
return Optional.empty();
}
@Override
public Optional<Class<? extends Annotation>> getAnnotationTypeByStereotype(@Nullable String stereotype) {
return Optional.empty();
}
@Override
public Optional<String> getAnnotationNameByStereotype(@NonNull Class<? extends Annotation> stereotype) {
return Optional.empty();
}
@NonNull
@Override
public <T> OptionalValues<T> getValues(@NonNull Class<? extends Annotation> annotation, @NonNull Class<T> valueType) {
//noinspection unchecked
return OptionalValues.EMPTY_VALUES;
}
@NonNull
@Override
public List<String> getAnnotationNamesByStereotype(@NonNull Class<? extends Annotation> stereotype) {
return Collections.emptyList();
}
@NonNull
@Override
public List<Class<? extends Annotation>> getAnnotationTypesByStereotype(@NonNull Class<? extends Annotation> stereotype) {
return Collections.emptyList();
}
@NonNull
@Override
public List<Class<? extends Annotation>> getAnnotationTypesByStereotype(@NonNull String stereotype) {
return Collections.emptyList();
}
@Override
public <T extends Annotation> Optional<AnnotationValue<T>> findAnnotation(@NonNull Class<T> annotationClass) {
return Optional.empty();
}
@Override
public <T extends Annotation> Optional<AnnotationValue<T>> findDeclaredAnnotation(@NonNull Class<T> annotationClass) {
return Optional.empty();
}
@Override
public <T> Optional<T> getValue(@NonNull String annotation, @NonNull String member, @NonNull Class<T> requiredType) {
return Optional.empty();
}
@Override
public <T> Optional<T> getValue(@NonNull String annotation, @NonNull String member, @NonNull Argument<T> requiredType) {
return Optional.empty();
}
@Override
public OptionalLong longValue(@NonNull String annotation, @NonNull String member) {
return OptionalLong.empty();
}
@Override
public OptionalLong longValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return OptionalLong.empty();
}
@Override
public <E extends Enum<E>> Optional<E> enumValue(@NonNull String annotation, Class<E> enumType) {
return Optional.empty();
}
@Override
public <E extends Enum<E>> Optional<E> enumValue(@NonNull String annotation, @NonNull String member, Class<E> enumType) {
return Optional.empty();
}
@Override
public <E extends Enum<E>> Optional<E> enumValue(@NonNull Class<? extends Annotation> annotation, Class<E> enumType) {
return Optional.empty();
}
@Override
public <E extends Enum<E>> Optional<E> enumValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member, Class<E> enumType) {
return Optional.empty();
}
@NonNull
@Override
public <T> Class<T>[] classValues(@NonNull String annotation) {
return (Class<T>[]) ReflectionUtils.EMPTY_CLASS_ARRAY;
}
@NonNull
@Override
public <T> Class<T>[] classValues(@NonNull String annotation, @NonNull String member) {
return (Class<T>[]) ReflectionUtils.EMPTY_CLASS_ARRAY;
}
@NonNull
@Override
public <T> Class<T>[] classValues(@NonNull Class<? extends Annotation> annotation) {
return (Class<T>[]) ReflectionUtils.EMPTY_CLASS_ARRAY;
}
@NonNull
@Override
public <T> Class<T>[] classValues(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return (Class<T>[]) ReflectionUtils.EMPTY_CLASS_ARRAY;
}
@Override
public Optional<Class> classValue(@NonNull String annotation) {
return Optional.empty();
}
@Override
public Optional<Class> classValue(@NonNull String annotation, @NonNull String member) {
return Optional.empty();
}
@Override
public Optional<Class> classValue(@NonNull Class<? extends Annotation> annotation) {
return Optional.empty();
}
@Override
public Optional<Class> classValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return Optional.empty();
}
@Override
public OptionalInt intValue(@NonNull String annotation, @NonNull String member) {
return OptionalInt.empty();
}
@Override
public OptionalInt intValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return OptionalInt.empty();
}
@Override
public OptionalInt intValue(@NonNull Class<? extends Annotation> annotation) {
return OptionalInt.empty();
}
@Override
public Optional<String> stringValue(@NonNull String annotation, @NonNull String member) {
return Optional.empty();
}
@Override
public Optional<String> stringValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return Optional.empty();
}
@NonNull
@Override
public Optional<String> stringValue(@NonNull Class<? extends Annotation> annotation) {
return Optional.empty();
}
@NonNull
@Override
public Optional<String> stringValue(@NonNull String annotation) {
return Optional.empty();
}
@Override
public Optional<Boolean> booleanValue(@NonNull String annotation, @NonNull String member) {
return Optional.empty();
}
@Override
public Optional<Boolean> booleanValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return Optional.empty();
}
@NonNull
@Override
public Optional<Boolean> booleanValue(@NonNull Class<? extends Annotation> annotation) {
return Optional.empty();
}
@NonNull
@Override
public Optional<Boolean> booleanValue(@NonNull String annotation) {
return Optional.empty();
}
@NonNull
@Override
public String[] stringValues(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return StringUtils.EMPTY_STRING_ARRAY;
}
@NonNull
@Override
public String[] stringValues(@NonNull Class<? extends Annotation> annotation) {
return StringUtils.EMPTY_STRING_ARRAY;
}
@NonNull
@Override
public String[] stringValues(@NonNull String annotation, @NonNull String member) {
return StringUtils.EMPTY_STRING_ARRAY;
}
@NonNull
@Override
public String[] stringValues(@NonNull String annotation) {
return StringUtils.EMPTY_STRING_ARRAY;
}
@NonNull
@Override
public OptionalDouble doubleValue(@NonNull String annotation, @NonNull String member) {
return OptionalDouble.empty();
}
@NonNull
@Override
public OptionalDouble doubleValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return OptionalDouble.empty();
}
@NonNull
@Override
public OptionalDouble doubleValue(@NonNull Class<? extends Annotation> annotation) {
return OptionalDouble.empty();
}
@NonNull
@Override
public <T> Optional<T> getValue(@NonNull String annotation, @NonNull Class<T> requiredType) {
return Optional.empty();
}
@NonNull
@Override
public Optional<Object> getValue(@NonNull String annotation, @NonNull String member) {
return Optional.empty();
}
@NonNull
@Override
public Optional<Object> getValue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return Optional.empty();
}
@Override
public boolean isTrue(@NonNull String annotation, @NonNull String member) {
return false;
}
@Override
public boolean isTrue(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return false;
}
@Override
public boolean isPresent(@NonNull String annotation, @NonNull String member) {
return false;
}
@Override
public boolean isPresent(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return false;
}
@Override
public boolean isFalse(@NonNull Class<? extends Annotation> annotation, @NonNull String member) {
return true;
}
@Override
public boolean isFalse(@NonNull String annotation, @NonNull String member) {
return true;
}
@NonNull
@Override
public Optional<Object> getValue(@NonNull String annotation) {
return Optional.empty();
}
@NonNull
@Override
public Optional<Object> getValue(@NonNull Class<? extends Annotation> annotation) {
return Optional.empty();
}
@NonNull
@Override
public <T> Optional<T> getValue(@NonNull Class<? extends Annotation> annotation, @NonNull Class<T> requiredType) {
return Optional.empty();
}
@NonNull
@Override
public <T> Optional<T> getValue(@NonNull Class<? extends Annotation> annotation, @NonNull Argument<T> requiredType) {
return Optional.empty();
}
@NonNull
@Override
public <T> Optional<T> getValue(@NonNull String annotation, @NonNull Argument<T> requiredType) {
return Optional.empty();
}
@Override
public boolean hasAnnotation(@Nullable Class<? extends Annotation> annotation) {
return false;
}
@Override
public boolean hasStereotype(@Nullable Class<? extends Annotation> annotation) {
return false;
}
@Override
public boolean hasStereotype(@Nullable Class<? extends Annotation>... annotations) {
return false;
}
@Override
public boolean hasStereotype(@Nullable String[] annotations) {
return false;
}
@Override
public boolean hasDeclaredAnnotation(@Nullable Class<? extends Annotation> annotation) {
return false;
}
@Override
public boolean hasDeclaredStereotype(@Nullable Class<? extends Annotation> stereotype) {
return false;
}
@Override
public boolean hasDeclaredStereotype(@Nullable Class<? extends Annotation>... annotations) {
return false;
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public AnnotationMetadata copyAnnotationMetadata() {
return this;
}
}
| EmptyAnnotationMetadata |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java | {
"start": 723,
"end": 2796
} | class ____ extends ActionResponse implements ToXContentObject {
private final String authenticationRequestUrl;
/*
* The oAuth2 state parameter used for CSRF protection.
*/
private final String state;
/*
* String value used to associate a Client session with an ID Token, and to mitigate replay attacks.
*/
private final String nonce;
/*
* String value: name of the realm used to perform authentication.
*/
private final String realmName;
public OpenIdConnectPrepareAuthenticationResponse(String authorizationEndpointUrl, String state, String nonce, String realmName) {
this.authenticationRequestUrl = authorizationEndpointUrl;
this.state = state;
this.nonce = nonce;
this.realmName = realmName;
}
public String getAuthenticationRequestUrl() {
return authenticationRequestUrl;
}
public String getState() {
return state;
}
public String getNonce() {
return nonce;
}
public String getRealmName() {
return realmName;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(authenticationRequestUrl);
out.writeString(state);
out.writeString(nonce);
out.writeString(realmName);
}
public String toString() {
return "{authenticationRequestUrl="
+ authenticationRequestUrl
+ ", state="
+ state
+ ", nonce="
+ nonce
+ ", realmName"
+ realmName
+ "}";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("redirect", authenticationRequestUrl);
builder.field("state", state);
builder.field("nonce", nonce);
if (realmName != null) {
builder.field("realm", realmName);
}
builder.endObject();
return builder;
}
}
| OpenIdConnectPrepareAuthenticationResponse |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/TaskLocalStateStoreImplTest.java | {
"start": 1810,
"end": 11659
} | class ____ {
protected @TempDir Path temporaryFolder;
protected File[] allocationBaseDirs;
protected TaskLocalStateStoreImpl taskLocalStateStore;
protected JobID jobID;
protected AllocationID allocationID;
protected JobVertexID jobVertexID;
protected int subtaskIdx;
@BeforeEach
void before() throws Exception {
jobID = new JobID();
allocationID = new AllocationID();
jobVertexID = new JobVertexID();
subtaskIdx = 0;
this.allocationBaseDirs =
new File[] {
TempDirUtils.newFolder(temporaryFolder), TempDirUtils.newFolder(temporaryFolder)
};
this.taskLocalStateStore =
createTaskLocalStateStoreImpl(
allocationBaseDirs, jobID, allocationID, jobVertexID, subtaskIdx);
}
@Nonnull
private TaskLocalStateStoreImpl createTaskLocalStateStoreImpl(
File[] allocationBaseDirs,
JobID jobID,
AllocationID allocationID,
JobVertexID jobVertexID,
int subtaskIdx) {
LocalSnapshotDirectoryProviderImpl directoryProvider =
new LocalSnapshotDirectoryProviderImpl(
allocationBaseDirs, jobID, jobVertexID, subtaskIdx);
LocalRecoveryConfig localRecoveryConfig =
LocalRecoveryConfig.backupAndRecoveryEnabled(directoryProvider);
return new TaskLocalStateStoreImpl(
jobID,
allocationID,
jobVertexID,
subtaskIdx,
localRecoveryConfig,
Executors.directExecutor());
}
/** Test that the instance delivers a correctly configured LocalRecoveryDirectoryProvider. */
@Test
void getLocalRecoveryRootDirectoryProvider() {
LocalRecoveryConfig directoryProvider = taskLocalStateStore.getLocalRecoveryConfig();
assertThat(
directoryProvider
.getLocalStateDirectoryProvider()
.get()
.allocationBaseDirsCount())
.isEqualTo(allocationBaseDirs.length);
for (int i = 0; i < allocationBaseDirs.length; ++i) {
assertThat(
directoryProvider
.getLocalStateDirectoryProvider()
.get()
.selectAllocationBaseDirectory(i))
.isEqualTo(allocationBaseDirs[i]);
}
}
/** Tests basic store/retrieve of local state. */
@Test
void storeAndRetrieve() throws Exception {
final int chkCount = 3;
for (int i = 0; i < chkCount; ++i) {
assertThat(taskLocalStateStore.retrieveLocalState(i)).isNull();
}
List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount);
checkStoredAsExpected(taskStateSnapshots, 0, chkCount);
assertThat(taskLocalStateStore.retrieveLocalState(chkCount + 1)).isNull();
}
/** Test checkpoint pruning. */
@Test
void pruneCheckpoints() throws Exception {
final int chkCount = 3;
List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount);
// test retrieve with pruning
taskLocalStateStore.pruneMatchingCheckpoints((long chk) -> chk != chkCount - 1);
for (int i = 0; i < chkCount - 1; ++i) {
assertThat(taskLocalStateStore.retrieveLocalState(i)).isNull();
}
checkStoredAsExpected(taskStateSnapshots, chkCount - 1, chkCount);
}
/** Tests pruning of previous checkpoints if a new checkpoint is confirmed. */
@Test
void confirmCheckpoint() throws Exception {
final int chkCount = 3;
final int confirmed = chkCount - 1;
List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount);
taskLocalStateStore.confirmCheckpoint(confirmed);
checkPrunedAndDiscarded(taskStateSnapshots, 0, confirmed);
checkStoredAsExpected(taskStateSnapshots, confirmed, chkCount);
}
/** Tests pruning of target previous checkpoints if that checkpoint is aborted. */
@Test
void abortCheckpoint() throws Exception {
final int chkCount = 4;
final int aborted = chkCount - 2;
List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount);
taskLocalStateStore.abortCheckpoint(aborted);
checkPrunedAndDiscarded(taskStateSnapshots, aborted, aborted + 1);
checkStoredAsExpected(taskStateSnapshots, 0, aborted);
checkStoredAsExpected(taskStateSnapshots, aborted + 1, chkCount);
}
/**
* Tests that disposal of a {@link TaskLocalStateStoreImpl} works and discards all local states.
*/
@Test
void dispose() throws Exception {
final int chkCount = 3;
final int confirmed = chkCount - 1;
List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount);
taskLocalStateStore.confirmCheckpoint(confirmed);
taskLocalStateStore.dispose();
checkPrunedAndDiscarded(taskStateSnapshots, 0, chkCount);
}
@Test
void retrieveNullIfNoPersistedLocalState() {
assertThat(taskLocalStateStore.retrieveLocalState(0)).isNull();
}
@Test
void retrieveNullIfDisableLocalRecovery() {
LocalSnapshotDirectoryProvider directoryProvider =
new LocalSnapshotDirectoryProviderImpl(
allocationBaseDirs, jobID, jobVertexID, subtaskIdx);
LocalRecoveryConfig localRecoveryConfig =
new LocalRecoveryConfig(false, true, directoryProvider);
TaskLocalStateStoreImpl localStateStore =
new TaskLocalStateStoreImpl(
jobID,
allocationID,
jobVertexID,
subtaskIdx,
localRecoveryConfig,
Executors.directExecutor());
final TaskStateSnapshot taskStateSnapshot = createTaskStateSnapshot();
final long checkpointId = 1L;
localStateStore.storeLocalState(checkpointId, taskStateSnapshot);
assertThat(localStateStore.retrieveLocalState(checkpointId)).isNull();
}
@Test
void retrievePersistedLocalStateFromDisc() {
final TaskStateSnapshot taskStateSnapshot = createTaskStateSnapshot();
final long checkpointId = 0L;
taskLocalStateStore.storeLocalState(checkpointId, taskStateSnapshot);
final TaskLocalStateStoreImpl newTaskLocalStateStore =
createTaskLocalStateStoreImpl(
allocationBaseDirs, jobID, allocationID, jobVertexID, 0);
final TaskStateSnapshot retrievedTaskStateSnapshot =
newTaskLocalStateStore.retrieveLocalState(checkpointId);
assertThat(retrievedTaskStateSnapshot).isEqualTo(taskStateSnapshot);
}
@Nonnull
protected TaskStateSnapshot createTaskStateSnapshot() {
final Map<OperatorID, OperatorSubtaskState> operatorSubtaskStates = new HashMap<>();
operatorSubtaskStates.put(new OperatorID(), OperatorSubtaskState.builder().build());
operatorSubtaskStates.put(new OperatorID(), OperatorSubtaskState.builder().build());
final TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot(operatorSubtaskStates);
return taskStateSnapshot;
}
@Test
void deletesLocalStateIfRetrievalFails() throws IOException {
final TaskStateSnapshot taskStateSnapshot = createTaskStateSnapshot();
final long checkpointId = 0L;
taskLocalStateStore.storeLocalState(checkpointId, taskStateSnapshot);
final File taskStateSnapshotFile =
taskLocalStateStore.getTaskStateSnapshotFile(checkpointId);
Files.write(
taskStateSnapshotFile.toPath(), new byte[] {1, 2, 3, 4}, StandardOpenOption.WRITE);
final TaskLocalStateStoreImpl newTaskLocalStateStore =
createTaskLocalStateStoreImpl(
allocationBaseDirs, jobID, allocationID, jobVertexID, subtaskIdx);
assertThat(newTaskLocalStateStore.retrieveLocalState(checkpointId)).isNull();
assertThat(taskStateSnapshotFile.getParentFile()).doesNotExist();
}
private void checkStoredAsExpected(List<TestingTaskStateSnapshot> history, int start, int end) {
for (int i = start; i < end; ++i) {
TestingTaskStateSnapshot expected = history.get(i);
assertThat(taskLocalStateStore.retrieveLocalState(i)).isSameAs(expected);
assertThat(expected.isDiscarded()).isFalse();
}
}
private void checkPrunedAndDiscarded(
List<TestingTaskStateSnapshot> history, int start, int end) {
for (int i = start; i < end; ++i) {
assertThat(taskLocalStateStore.retrieveLocalState(i)).isNull();
assertThat(history.get(i).isDiscarded()).isTrue();
}
}
private List<TestingTaskStateSnapshot> storeStates(int count) {
List<TestingTaskStateSnapshot> taskStateSnapshots = new ArrayList<>(count);
for (int i = 0; i < count; ++i) {
OperatorID operatorID = new OperatorID();
TestingTaskStateSnapshot taskStateSnapshot = new TestingTaskStateSnapshot();
OperatorSubtaskState operatorSubtaskState = OperatorSubtaskState.builder().build();
taskStateSnapshot.putSubtaskStateByOperatorID(operatorID, operatorSubtaskState);
taskLocalStateStore.storeLocalState(i, taskStateSnapshot);
taskStateSnapshots.add(taskStateSnapshot);
}
return taskStateSnapshots;
}
protected static final | TaskLocalStateStoreImplTest |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/impl/http1x/Http1xServerRequest.java | {
"start": 2183,
"end": 19926
} | class ____ extends HttpServerRequestInternal implements io.vertx.core.spi.observability.HttpRequest {
private static final HostAndPort NULL_HOST_AND_PORT = HostAndPort.create("", -1);
private final Http1xServerConnection conn;
final ContextInternal context;
private HttpRequest request;
private io.vertx.core.http.HttpVersion version;
private io.vertx.core.http.HttpMethod method;
private HostAndPort authority;
private String uri;
private String path;
private String query;
// Accessed on event loop
Object metric;
Object trace;
boolean reportMetricsFailed;
private Http1xServerResponse response;
// Cache this for performance
private Charset paramsCharset = StandardCharsets.UTF_8;
private MultiMap params;
private boolean semicolonIsNormalCharInParams;
private MultiMap headers;
private String absoluteURI;
private HttpEventHandler eventHandler;
private Handler<HttpServerFileUpload> uploadHandler;
private MultiMap attributes;
private boolean expectMultipart;
private HttpPostRequestDecoder decoder;
private boolean ended;
private long bytesRead;
private volatile InboundMessageQueue<Object> queue;
Http1xServerRequest(Http1xServerConnection conn, HttpRequest request, ContextInternal context) {
this.conn = conn;
this.context = context;
this.request = request;
}
private InboundMessageQueue<Object> queue() {
return queue(true);
}
private InboundMessageQueue<Object> queue(boolean create) {
InboundMessageQueue<Object> ref = queue;
if (create && ref == null) {
synchronized (this) {
ref = queue;
if (ref == null) {
ref = new InboundMessageQueue<>(context.eventLoop(), context.executor()) {
@Override
protected void handleMessage(Object elt) {
if (elt == InboundBuffer.END_SENTINEL) {
onEnd();
} else {
onData((Buffer) elt);
}
}
@Override
protected void handleResume() {
conn.doResume();
}
@Override
protected void handlePause() {
conn.doPause();
}
};
queue = ref;
}
}
}
return ref;
}
private HttpEventHandler eventHandler(boolean create) {
if (eventHandler == null && create) {
eventHandler = new HttpEventHandler(context);
}
return eventHandler;
}
public HttpRequest nettyRequest() {
synchronized (conn) {
return request;
}
}
void handleBegin(boolean keepAlive) {
if (METRICS_ENABLED) {
reportRequestBegin();
}
response = new Http1xServerResponse(context.owner(), context, conn, request, metric, keepAlive);
if (conn.handle100ContinueAutomatically) {
check100();
}
}
void handleContent(Buffer buffer) {
InboundMessageQueue<Object> queue = queue();
boolean drain = queue.add(buffer);
if (drain) {
queue.drain();
}
}
void handleEnd() {
InboundMessageQueue<Object> queue = queue(false);
if (queue != null) {
handleEnd(queue);
} else {
context.execute(this, Http1xServerRequest::onEnd);
}
}
private void handleEnd(InboundMessageQueue<Object> queue) {
boolean drain = queue.add(InboundBuffer.END_SENTINEL);
if (drain) {
queue.drain();
}
}
private void check100() {
if (HttpUtil.is100ContinueExpected(request)) {
response.writeContinue();
}
}
public Object metric() {
return metric;
}
Object trace() {
return trace;
}
@Override
public ContextInternal context() {
return context;
}
@Override
public int id() {
return 0;
}
@Override
public io.vertx.core.http.HttpVersion version() {
if (version == null) {
io.netty.handler.codec.http.HttpVersion nettyVersion = request.protocolVersion();
if (nettyVersion == io.netty.handler.codec.http.HttpVersion.HTTP_1_0) {
version = HttpVersion.HTTP_1_0;
} else if (nettyVersion == io.netty.handler.codec.http.HttpVersion.HTTP_1_1) {
version = HttpVersion.HTTP_1_1;
}
}
return version;
}
@Override
public io.vertx.core.http.HttpMethod method() {
if (method == null) {
method = io.vertx.core.http.HttpMethod.fromNetty(request.method());
}
return method;
}
@Override
public String uri() {
if (uri == null) {
uri = request.uri();
}
return uri;
}
@Override
public String path() {
if (path == null) {
path = HttpUtils.parsePath(uri());
}
return path;
}
@Override
public String query() {
if (query == null) {
query = HttpUtils.parseQuery(uri());
}
return query;
}
@Override
public boolean isValidAuthority() {
HostAndPort authority = this.authority;
if (authority == NULL_HOST_AND_PORT) {
return false;
}
if (authority != null) {
return true;
}
String host = getHeader(HttpHeaderNames.HOST);
if (host == null || !HostAndPortImpl.isValidAuthority(host)) {
this.authority = NULL_HOST_AND_PORT;
return false;
}
return true;
}
@Override
public HostAndPort authority() {
HostAndPort authority = this.authority;
if (authority == NULL_HOST_AND_PORT) {
return null;
}
if (authority == null) {
String host = getHeader(HttpHeaderNames.HOST);
if (host == null) {
this.authority = NULL_HOST_AND_PORT;
return null;
}
// it's fine to have a benign race here as long as HostAndPort is immutable
// to ensure safe publication
authority = HostAndPort.parseAuthority(host, -1);
this.authority = authority;
}
return authority;
}
@Override
public HostAndPort authority(boolean real) {
return real ? null : authority();
}
@Override
public long bytesRead() {
synchronized (conn) {
return bytesRead;
}
}
@Override
public Http1xServerResponse response() {
return response;
}
@Override
public MultiMap headers() {
MultiMap headers = this.headers;
if (headers == null) {
HttpHeaders reqHeaders = request.headers();
if (reqHeaders instanceof MultiMap) {
headers = (MultiMap) reqHeaders;
} else {
headers = new HeadersAdaptor(reqHeaders);
}
this.headers = headers;
}
return headers;
}
@Override
public HttpServerRequest setParamsCharset(String charset) {
Objects.requireNonNull(charset, "Charset must not be null");
Charset current = paramsCharset;
paramsCharset = Charset.forName(charset);
if (!paramsCharset.equals(current)) {
params = null;
}
return this;
}
@Override
public String getParamsCharset() {
return paramsCharset.name();
}
@Override
public MultiMap params(boolean semicolonIsNormalChar) {
if (params == null || semicolonIsNormalChar != semicolonIsNormalCharInParams) {
params = HttpUtils.params(uri(), paramsCharset, semicolonIsNormalChar);
semicolonIsNormalCharInParams = semicolonIsNormalChar;
}
return params;
}
@Override
public HttpServerRequest handler(Handler<Buffer> handler) {
synchronized (conn) {
if (handler != null) {
checkEnded();
}
HttpEventHandler eventHandler = eventHandler(handler != null);
if (eventHandler != null) {
eventHandler.chunkHandler(handler);
}
return this;
}
}
@Override
public HttpServerRequest exceptionHandler(Handler<Throwable> handler) {
synchronized (conn) {
HttpEventHandler eventHandler = eventHandler(handler != null);
if (eventHandler != null) {
eventHandler.exceptionHandler(handler);
}
return this;
}
}
@Override
public HttpServerRequest pause() {
queue().pause();
return this;
}
@Override
public HttpServerRequest fetch(long amount) {
queue().fetch(amount);
return this;
}
@Override
public HttpServerRequest resume() {
return fetch(Long.MAX_VALUE);
}
@Override
public HttpServerRequest endHandler(Handler<Void> handler) {
synchronized (conn) {
if (handler != null) {
checkEnded();
}
HttpEventHandler eventHandler = eventHandler(handler != null);
if (eventHandler != null) {
eventHandler.endHandler(handler);
}
return this;
}
}
@Override
public String scheme() {
return isSSL() ? "https" : "http";
}
@Override
public String absoluteURI() {
if (absoluteURI == null) {
absoluteURI = HttpUtils.absoluteURI(conn.serverOrigin(), this);
}
return absoluteURI;
}
@Override
public SocketAddress remoteAddress() {
return super.remoteAddress();
}
@Override
public Future<NetSocket> toNetSocket() {
return response.netSocket(method(), headers());
}
@Override
public HttpServerRequest uploadHandler(Handler<HttpServerFileUpload> handler) {
synchronized (conn) {
if (handler != null) {
checkEnded();
}
uploadHandler = handler;
return this;
}
}
@Override
public MultiMap formAttributes() {
return attributes();
}
@Override
public String getFormAttribute(String attributeName) {
return formAttributes().get(attributeName);
}
@Override
public Future<ServerWebSocket> toWebSocket() {
return webSocketHandshake().compose(handshake -> handshake.accept());
}
/**
* @return a future of the un-accepted WebSocket
*/
Future<ServerWebSocketHandshake> webSocketHandshake() {
PromiseInternal<ServerWebSocketHandshake> promise = context.promise();
webSocketHandshake(promise);
return promise.future();
}
/**
* Handle the request when a WebSocket upgrade header is present.
*/
private void webSocketHandshake(PromiseInternal<ServerWebSocketHandshake> promise) {
BufferInternal body = BufferInternal.buffer();
boolean[] failed = new boolean[1];
handler(buff -> {
if (!failed[0]) {
body.appendBuffer(buff);
if (body.length() > 8192) {
failed[0] = true;
// Request Entity Too Large
response.setStatusCode(413).end();
conn.close();
}
}
});
exceptionHandler(promise::tryFail);
endHandler(v -> {
if (!failed[0]) {
// Handle the request once we have the full body.
request = new DefaultFullHttpRequest(
request.protocolVersion(),
request.method(),
request.uri(),
body.getByteBuf(),
request.headers(),
EmptyHttpHeaders.INSTANCE
);
conn.createWebSocket(this, promise);
}
});
// In case we were paused
resume();
}
@Override
public HttpServerRequest setExpectMultipart(boolean expect) {
synchronized (conn) {
checkEnded();
expectMultipart = expect;
if (expect) {
if (decoder == null) {
String contentType = request.headers().get(HttpHeaderNames.CONTENT_TYPE);
if (contentType == null) {
throw new IllegalStateException("Request must have a content-type header to decode a multipart request");
}
if (!HttpUtils.isValidMultipartContentType(contentType)) {
throw new IllegalStateException("Request must have a valid content-type header to decode a multipart request");
}
if (!HttpUtils.isValidMultipartMethod(request.method())) {
throw new IllegalStateException("Request method must be one of POST, PUT, PATCH or DELETE to decode a multipart request");
}
NettyFileUploadDataFactory factory = new NettyFileUploadDataFactory(context, this, () -> uploadHandler);
HttpServerOptions options = conn.options;
factory.setMaxLimit(options.getMaxFormAttributeSize());
int maxFields = options.getMaxFormFields();
int maxBufferedBytes = options.getMaxFormBufferedBytes();
decoder = new HttpPostRequestDecoder(factory, request, HttpConstants.DEFAULT_CHARSET, maxFields, maxBufferedBytes);
}
} else {
decoder = null;
}
return this;
}
}
@Override
public synchronized boolean isExpectMultipart() {
return expectMultipart;
}
@Override
public boolean isEnded() {
synchronized (conn) {
return ended;
}
}
@Override
public HttpServerRequest customFrameHandler(Handler<HttpFrame> handler) {
return this;
}
@Override
public HttpConnection connection() {
return conn;
}
@Override
public synchronized Future<Buffer> body() {
checkEnded();
return eventHandler(true).body();
}
@Override
public synchronized Future<Void> end() {
checkEnded();
return eventHandler(true).end();
}
private void onData(Buffer data) {
HttpEventHandler handler;
synchronized (conn) {
bytesRead += data.length();
if (decoder != null) {
try {
decoder.offer(new DefaultHttpContent(((BufferInternal)data).getByteBuf()));
} catch (HttpPostRequestDecoder.ErrorDataDecoderException |
HttpPostRequestDecoder.TooLongFormFieldException |
HttpPostRequestDecoder.TooManyFormFieldsException e) {
decoder.destroy();
decoder = null;
handleException(e);
}
}
handler = eventHandler;
}
if (handler != null) {
eventHandler.handleChunk(data);
}
}
private void onEnd() {
if (METRICS_ENABLED) {
reportRequestComplete();
}
HttpEventHandler handler;
synchronized (conn) {
if (decoder != null) {
endDecode();
}
ended = true;
handler = eventHandler;
}
// If there have been uploads then we let the last one call the end handler once any fileuploads are complete
if (handler != null) {
handler.handleEnd();
}
}
private void reportRequestComplete() {
HttpServerMetrics metrics = conn.metrics;
if (metrics != null) {
metrics.requestEnd(metric, this, bytesRead);
conn.flushBytesRead();
}
}
private void reportRequestBegin() {
HttpServerMetrics metrics = conn.metrics;
if (metrics != null) {
metric = metrics.requestBegin(conn.metric(), this);
}
VertxTracer tracer = context.tracer();
if (tracer != null) {
trace = tracer.receiveRequest(context, SpanKind.RPC, conn.tracingPolicy(), this, request.method().name(), request.headers(), HttpUtils.SERVER_REQUEST_TAG_EXTRACTOR);
}
}
private void endDecode() {
try {
decoder.offer(LastHttpContent.EMPTY_LAST_CONTENT);
while (decoder.hasNext()) {
InterfaceHttpData data = decoder.next();
if (data instanceof Attribute) {
Attribute attr = (Attribute) data;
try {
attributes().add(attr.getName(), attr.getValue());
} catch (Exception e) {
// Will never happen, anyway handle it somehow just in case
handleException(e);
} finally {
attr.release();
}
}
}
} catch (HttpPostRequestDecoder.ErrorDataDecoderException |
HttpPostRequestDecoder.TooLongFormFieldException |
HttpPostRequestDecoder.TooManyFormFieldsException e) {
handleException(e);
} catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
// ignore this as it is expected
} finally {
decoder.destroy();
decoder = null;
}
}
void handleException(Throwable t) {
HttpEventHandler handler = null;
Http1xServerResponse resp = null;
InterfaceHttpData upload = null;
synchronized (conn) {
if (!isEnded()) {
handler = eventHandler;
if (decoder != null) {
upload = decoder.currentPartialHttpData();
}
}
if (!response.ended()) {
if (METRICS_ENABLED) {
reportRequestReset(t);
}
resp = response;
}
}
if (resp != null) {
resp.handleException(t);
}
if (upload instanceof NettyFileUpload) {
((NettyFileUpload) upload).handleException(t);
}
if (handler != null) {
handler.handleException(t);
}
}
private void reportRequestReset(Throwable err) {
if (conn.metrics != null) {
conn.metrics.requestReset(metric);
}
VertxTracer tracer = context.tracer();
if (tracer != null) {
tracer.sendResponse(context, null, trace, err, TagExtractor.empty());
}
}
private void checkEnded() {
if (isEnded()) {
throw new IllegalStateException("Request has already been read");
}
}
private MultiMap attributes() {
// Create it lazily
if (attributes == null) {
attributes = MultiMap.caseInsensitiveMultiMap();
}
return attributes;
}
@Override
public HttpServerRequest streamPriorityHandler(Handler<StreamPriority> handler) {
return this;
}
@Override
public DecoderResult decoderResult() {
return request.decoderResult();
}
@Override
public Set<Cookie> cookies() {
return (Set) response.cookies();
}
@Override
public Set<Cookie> cookies(String name) {
return (Set) response.cookies().getAll(name);
}
@Override
public Cookie getCookie(String name) {
return response.cookies()
.get(name);
}
@Override
public Cookie getCookie(String name, String domain, String path) {
return response.cookies()
.get(name, domain, path);
}
@Override
public HttpServerRequest routed(String route) {
if (METRICS_ENABLED && !response.ended() && conn.metrics != null) {
conn.metrics.requestRouted(metric, route);
}
return this;
}
}
| Http1xServerRequest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/ProactiveAuthCompletionExceptionMapperTest.java | {
"start": 2420,
"end": 2701
} | class ____ {
@ServerExceptionMapper(value = AuthenticationCompletionException.class)
public Response unauthorized() {
return Response.status(UNAUTHORIZED).entity(AUTHENTICATION_COMPLETION_EX).build();
}
}
}
| CustomAuthCompletionExceptionMapper |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/PropertySourceAnnotationTests.java | {
"start": 13868,
"end": 14470
} | class ____ {
@Inject Environment env;
@Bean
FactoryBean<TestBean> testBean() {
final String name = env.getProperty("testbean.name");
return new FactoryBean<>() {
@Override
public TestBean getObject() {
return new TestBean(name);
}
@Override
public Class<?> getObjectType() {
return TestBean.class;
}
@Override
public boolean isSingleton() {
return false;
}
};
}
}
@Configuration
@PropertySource(name="p1", value="classpath:org/springframework/context/annotation/p1.properties")
static | ConfigWithResolvablePlaceholderAndFactoryBean |
java | google__gson | gson/src/test/java/com/google/gson/functional/JsonAdapterSerializerDeserializerTest.java | {
"start": 5637,
"end": 5779
} | class ____<T> {
@SuppressWarnings("unused")
T value;
Base(T value) {
this.value = value;
}
}
private static final | Base |
java | quarkusio__quarkus | extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/WebSocketConnection.java | {
"start": 1370,
"end": 1733
} | interface ____ extends Sender {
/**
*
* @param predicate
* @return a new sender that sends messages to all open clients connected to the same WebSocket endpoint and matching
* the given filter predicate
*/
BroadcastSender filter(Predicate<WebSocketConnection> predicate);
}
}
| BroadcastSender |
java | google__auto | value/src/main/java/com/google/auto/value/extension/AutoValueExtension.java | {
"start": 3141,
"end": 3418
} | class ____ the hierarchy will always be the one generated by the AutoValue
* processor and the last one will always be the one generated by the Extension that {@code
* mustBeFinal}, if any. Other than that, the order of the classes in the hierarchy is unspecified.
* The last | in |
java | spring-projects__spring-framework | spring-context-support/src/test/java/org/springframework/cache/jcache/interceptor/AnnotationCacheOperationSourceTests.java | {
"start": 9496,
"end": 10081
} | class ____ {
@CacheResult(cacheKeyGenerator = TestableCacheKeyGenerator.class)
public Object customKeyGenerator(Long id) {
return null;
}
@CacheResult(cacheKeyGenerator = TestableCacheKeyGenerator.class)
public Object customKeyGeneratorSpringBean(Long id) {
return null;
}
@CacheResult(cacheResolverFactory = TestableCacheResolverFactory.class)
public Object customCacheResolver(Long id) {
return null;
}
}
@CacheDefaults(cacheResolverFactory = TestableCacheResolverFactory.class, cacheKeyGenerator = TestableCacheKeyGenerator.class)
static | CustomService |
java | grpc__grpc-java | services/src/generated/test/grpc/io/grpc/reflection/testing/AnotherDynamicServiceGrpc.java | {
"start": 6038,
"end": 7065
} | class ____
extends io.grpc.stub.AbstractAsyncStub<AnotherDynamicServiceStub> {
private AnotherDynamicServiceStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected AnotherDynamicServiceStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new AnotherDynamicServiceStub(channel, callOptions);
}
/**
* <pre>
* A method
* </pre>
*/
public void method(io.grpc.reflection.testing.DynamicRequest request,
io.grpc.stub.StreamObserver<io.grpc.reflection.testing.DynamicReply> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getMethodMethod(), getCallOptions()), request, responseObserver);
}
}
/**
* A stub to allow clients to do synchronous rpc calls to service AnotherDynamicService.
* <pre>
* AnotherDynamicService
* </pre>
*/
public static final | AnotherDynamicServiceStub |
java | apache__camel | components/camel-pdf/src/main/java/org/apache/camel/component/pdf/PdfHeaderConstants.java | {
"start": 896,
"end": 2857
} | class ____ {
@Metadata(description = "Expected type is\n" +
"https://pdfbox.apache.org/docs/2.0.13/javadocs/org/apache/pdfbox/pdmodel/encryption/ProtectionPolicy.html[ProtectionPolicy].\n"
+
"If specified then PDF document will be encrypted with it.",
javaType = "org.apache.pdfbox.pdmodel.encryption.ProtectionPolicy")
public static final String PROTECTION_POLICY_HEADER_NAME = "protection-policy";
@Metadata(description = "*Mandatory* header for `append` operation and ignored in all other\n" +
"operations. Expected type is\n" +
"https://pdfbox.apache.org/docs/2.0.13/javadocs/org/apache/pdfbox/pdmodel/PDDocument.html[PDDocument].\n"
+
"Stores PDF document which will be used for append operation.",
javaType = "org.apache.pdfbox.pdmodel.PDDocument")
public static final String PDF_DOCUMENT_HEADER_NAME = "pdf-document";
@Metadata(description = "Expected type is\n" +
"https://pdfbox.apache.org/docs/2.0.13/javadocs/org/apache/pdfbox/pdmodel/encryption/DecryptionMaterial.html[DecryptionMaterial].\n"
+
"*Mandatory* header if PDF document is encrypted.",
javaType = "org.apache.pdfbox.pdmodel.encryption.DecryptionMaterial")
public static final String DECRYPTION_MATERIAL_HEADER_NAME = "decryption-material";
@Metadata(description = """
*Mandatory* header for `merge` operation and ignored in all other
operations.
The array of pdf files that will be merged.
""",
javaType = "java.util.List<java.io.File>")
public static final String FILES_TO_MERGE_HEADER_NAME = "files-to-merge";
private PdfHeaderConstants() {
}
}
| PdfHeaderConstants |
java | google__guava | android/guava/src/com/google/common/reflect/TypeToken.java | {
"start": 54873,
"end": 55101
} | class ____ of now. So setting it makes a backward compatible
// change. Going forward, if any incompatible change is added, we can change the UID back to 1.
private static final long serialVersionUID = 3637540370352322684L;
}
| as |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlAlterModelSetConverter.java | {
"start": 1466,
"end": 3102
} | class ____ extends AbstractSqlAlterModelConverter<SqlAlterModelSet> {
@Override
public Operation convertSqlNode(SqlAlterModelSet sqlAlterModelSet, ConvertContext context) {
ResolvedCatalogModel existingModel =
getExistingModel(
context, sqlAlterModelSet.getFullName(), sqlAlterModelSet.ifModelExists());
Map<String, String> changeModelOptions = sqlAlterModelSet.getProperties();
if (changeModelOptions.isEmpty()) {
throw new ValidationException("ALTER MODEL SET does not support empty option.");
}
List<ModelChange> modelChanges = new ArrayList<>();
changeModelOptions.forEach((key, value) -> modelChanges.add(ModelChange.set(key, value)));
if (existingModel == null) {
return new AlterModelChangeOperation(
context.getCatalogManager()
.qualifyIdentifier(
UnresolvedIdentifier.of(sqlAlterModelSet.getFullName())),
modelChanges,
null,
sqlAlterModelSet.ifModelExists());
}
Map<String, String> newOptions = new HashMap<>(existingModel.getOptions());
newOptions.putAll(changeModelOptions);
return new AlterModelChangeOperation(
context.getCatalogManager()
.qualifyIdentifier(UnresolvedIdentifier.of(sqlAlterModelSet.getFullName())),
modelChanges,
existingModel.copy(newOptions),
sqlAlterModelSet.ifModelExists());
}
}
| SqlAlterModelSetConverter |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/validation/JPAValidationDisabledTestCase.java | {
"start": 349,
"end": 1041
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyEntity.class, JPATestValidationResource.class)
.addAsResource("application-validation-disabled.properties", "application.properties"));
@Test
@Transactional
public void testValidEntity() {
String entityName = "Post method should not persist an entity having a Size constraint of 50 on the name column if validation was enabled.";
RestAssured.given().body(entityName).when().post("/validation").then()
.body(is("OK"));
}
}
| JPAValidationDisabledTestCase |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/EnhancementInfo.java | {
"start": 832,
"end": 901
} | class ____ enhanced.
*/
boolean includesAssociationManagement();
}
| was |
java | apache__flink | flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/restore/FunctionDelegationHelper.java | {
"start": 2320,
"end": 3771
} | interface ____<F> extends Function {
void delegateIfNeeded(F delegated);
}
private final Map<String, DelegatingFunction> delegatingFunctions = new HashMap<>();
public <T, S extends State, F> void addOrUpdate(StateDescriptor<S, T> stateDescriptor) {
F function = tryGetFunction(stateDescriptor);
String name = stateDescriptor.getName();
if (function instanceof DelegatingFunction) {
LOG.debug("add delegate: {}", name);
delegatingFunctions.putIfAbsent(name, (DelegatingFunction<?>) function);
} else {
DelegatingFunction<F> delegating = delegatingFunctions.get(name);
if (delegating != null) {
LOG.debug("update delegate: {}", name);
checkState(function != null, "unable to extract function for state " + name);
delegating.delegateIfNeeded(function);
}
}
}
@Nullable
private static <F extends Function> F tryGetFunction(StateDescriptor<?, ?> stateDescriptor) {
if (stateDescriptor instanceof ReducingStateDescriptor) {
return (F) ((ReducingStateDescriptor) stateDescriptor).getReduceFunction();
} else if (stateDescriptor instanceof AggregatingStateDescriptor) {
return (F) ((AggregatingStateDescriptor) stateDescriptor).getAggregateFunction();
} else {
return null;
}
}
static | DelegatingFunction |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java | {
"start": 34846,
"end": 37087
} | class ____ implements CacheFile.EvictionListener {
private final long fileLength;
private final CacheKey cacheKey;
private final SearchableSnapshotDirectory directory;
final AtomicReference<CacheFile> cacheFile = new AtomicReference<>(); // null if evicted or not yet acquired
CacheFileReference(SearchableSnapshotDirectory directory, String fileName, long fileLength) {
this.cacheKey = directory.createCacheKey(fileName);
this.fileLength = fileLength;
this.directory = directory;
}
CacheFile get() throws Exception {
CacheFile currentCacheFile = cacheFile.get();
if (currentCacheFile != null) {
return currentCacheFile;
}
final CacheFile newCacheFile = directory.getCacheFile(cacheKey, fileLength);
synchronized (this) {
currentCacheFile = cacheFile.get();
if (currentCacheFile != null) {
return currentCacheFile;
}
newCacheFile.acquire(this);
final CacheFile previousCacheFile = cacheFile.getAndSet(newCacheFile);
assert previousCacheFile == null;
return newCacheFile;
}
}
@Override
public void onEviction(final CacheFile evictedCacheFile) {
synchronized (this) {
if (cacheFile.compareAndSet(evictedCacheFile, null)) {
evictedCacheFile.release(this);
}
}
}
void releaseOnClose() {
synchronized (this) {
final CacheFile currentCacheFile = cacheFile.getAndSet(null);
if (currentCacheFile != null) {
currentCacheFile.release(this);
}
}
}
@Override
public String toString() {
return "CacheFileReference{"
+ "cacheKey='"
+ cacheKey
+ '\''
+ ", fileLength="
+ fileLength
+ ", acquired="
+ (cacheFile.get() != null)
+ '}';
}
}
}
| CacheFileReference |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/conditional/propertyname/Address.java | {
"start": 245,
"end": 455
} | class ____ implements DomainModel {
private String street;
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
}
| Address |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-links/runtime/src/main/java/io/quarkus/resteasy/reactive/links/runtime/hal/ResteasyReactiveHalService.java | {
"start": 396,
"end": 1270
} | class ____ extends HalService {
private final RestLinksProvider linksProvider;
@Inject
public ResteasyReactiveHalService(RestLinksProvider linksProvider) {
this.linksProvider = linksProvider;
}
@Override
protected Map<String, HalLink> getClassLinks(Class<?> entityType) {
return linksToMap(linksProvider.getTypeLinks(entityType));
}
@Override
protected Map<String, HalLink> getInstanceLinks(Object entity) {
return linksToMap(linksProvider.getInstanceLinks(entity));
}
private Map<String, HalLink> linksToMap(Collection<Link> refLinks) {
Map<String, HalLink> links = new HashMap<>();
for (Link link : refLinks) {
links.put(link.getRel(), new HalLink(link.getUri().toString(), link.getTitle(), link.getType()));
}
return links;
}
}
| ResteasyReactiveHalService |
java | netty__netty | transport/src/main/java/io/netty/channel/group/ChannelMatchers.java | {
"start": 756,
"end": 845
} | class ____ provides often used {@link ChannelMatcher} implementations.
*/
public final | which |
java | google__dagger | javatests/dagger/functional/producers/subcomponent/ModuleSubcomponentsInterop.java | {
"start": 957,
"end": 1095
} | interface ____ {
ProductionChild.Builder productionChild();
}
@Module(subcomponents = ProductionChild.class)
static | ProvisionParent |
java | netty__netty | common/src/main/java/io/netty/util/internal/ObjectUtil.java | {
"start": 778,
"end": 12868
} | class ____ {
private static final float FLOAT_ZERO = 0.0F;
private static final double DOUBLE_ZERO = 0.0D;
private static final long LONG_ZERO = 0L;
private static final int INT_ZERO = 0;
private static final short SHORT_ZERO = 0;
private ObjectUtil() {
}
/**
* Checks that the given argument is not null. If it is, throws {@link NullPointerException}.
* Otherwise, returns the argument.
*/
public static <T> T checkNotNull(T arg, String text) {
if (arg == null) {
throw new NullPointerException(text);
}
return arg;
}
/**
* Check that the given varargs is not null and does not contain elements
* null elements.
*
* If it is, throws {@link NullPointerException}.
* Otherwise, returns the argument.
*/
public static <T> T[] deepCheckNotNull(String text, T... varargs) {
if (varargs == null) {
throw new NullPointerException(text);
}
for (T element : varargs) {
if (element == null) {
throw new NullPointerException(text);
}
}
return varargs;
}
/**
* Checks that the given argument is not null. If it is, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static <T> T checkNotNullWithIAE(final T arg, final String paramName) throws IllegalArgumentException {
if (arg == null) {
throw new IllegalArgumentException("Param '" + paramName + "' must not be null");
}
return arg;
}
/**
* Checks that the given argument is not null. If it is, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*
* @param <T> type of the given argument value.
* @param name of the parameter, belongs to the exception message.
* @param index of the array, belongs to the exception message.
* @param value to check.
* @return the given argument value.
* @throws IllegalArgumentException if value is null.
*/
public static <T> T checkNotNullArrayParam(T value, int index, String name) throws IllegalArgumentException {
if (value == null) {
throw new IllegalArgumentException(
"Array index " + index + " of parameter '" + name + "' must not be null");
}
return value;
}
/**
* Checks that the given argument is strictly positive. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static int checkPositive(int i, String name) {
if (i <= INT_ZERO) {
throw new IllegalArgumentException(name + " : " + i + " (expected: > 0)");
}
return i;
}
/**
* Checks that the given argument is strictly positive. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static long checkPositive(long l, String name) {
if (l <= LONG_ZERO) {
throw new IllegalArgumentException(name + " : " + l + " (expected: > 0)");
}
return l;
}
/**
* Checks that the given argument is strictly positive. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static double checkPositive(final double d, final String name) {
if (d <= DOUBLE_ZERO) {
throw new IllegalArgumentException(name + " : " + d + " (expected: > 0)");
}
return d;
}
/**
* Checks that the given argument is strictly positive. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static float checkPositive(final float f, final String name) {
if (f <= FLOAT_ZERO) {
throw new IllegalArgumentException(name + " : " + f + " (expected: > 0)");
}
return f;
}
/**
* Checks that the given argument is positive or zero. If it is not , throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static short checkPositive(short s, String name) {
if (s <= SHORT_ZERO) {
throw new IllegalArgumentException(name + " : " + s + " (expected: > 0)");
}
return s;
}
/**
* Checks that the given argument is positive or zero. If it is not , throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static int checkPositiveOrZero(int i, String name) {
if (i < INT_ZERO) {
throw new IllegalArgumentException(name + " : " + i + " (expected: >= 0)");
}
return i;
}
/**
* Checks that the given argument is positive or zero. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static long checkPositiveOrZero(long l, String name) {
if (l < LONG_ZERO) {
throw new IllegalArgumentException(name + " : " + l + " (expected: >= 0)");
}
return l;
}
/**
* Checks that the given argument is positive or zero. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static double checkPositiveOrZero(final double d, final String name) {
if (d < DOUBLE_ZERO) {
throw new IllegalArgumentException(name + " : " + d + " (expected: >= 0)");
}
return d;
}
/**
* Checks that the given argument is positive or zero. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static float checkPositiveOrZero(final float f, final String name) {
if (f < FLOAT_ZERO) {
throw new IllegalArgumentException(name + " : " + f + " (expected: >= 0)");
}
return f;
}
/**
* Checks that the given argument is in range. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static int checkInRange(int i, int start, int end, String name) {
if (i < start || i > end) {
throw new IllegalArgumentException(name + ": " + i + " (expected: " + start + "-" + end + ")");
}
return i;
}
/**
* Checks that the given argument is in range. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static long checkInRange(long l, long start, long end, String name) {
if (l < start || l > end) {
throw new IllegalArgumentException(name + ": " + l + " (expected: " + start + "-" + end + ")");
}
return l;
}
/**
* Checks that the given argument is in range. If it is not, throws {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static double checkInRange(double d, double start, double end, String name) {
if (d < start || d > end) {
throw new IllegalArgumentException(name + ": " + d + " (expected: " + start + "-" + end + ")");
}
return d;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static <T> T[] checkNonEmpty(T[] array, String name) {
//No String concatenation for check
if (checkNotNull(array, name).length == 0) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return array;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static byte[] checkNonEmpty(byte[] array, String name) {
//No String concatenation for check
if (checkNotNull(array, name).length == 0) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return array;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static char[] checkNonEmpty(char[] array, String name) {
//No String concatenation for check
if (checkNotNull(array, name).length == 0) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return array;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static <T extends Collection<?>> T checkNonEmpty(T collection, String name) {
//No String concatenation for check
if (checkNotNull(collection, name).isEmpty()) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return collection;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static String checkNonEmpty(final String value, final String name) {
if (checkNotNull(value, name).isEmpty()) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return value;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static <K, V, T extends Map<K, V>> T checkNonEmpty(T value, String name) {
if (checkNotNull(value, name).isEmpty()) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return value;
}
/**
* Checks that the given argument is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the argument.
*/
public static CharSequence checkNonEmpty(final CharSequence value, final String name) {
if (checkNotNull(value, name).length() == 0) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return value;
}
/**
* Trims the given argument and checks whether it is neither null nor empty.
* If it is, throws {@link NullPointerException} or {@link IllegalArgumentException}.
* Otherwise, returns the trimmed argument.
*
* @param value to trim and check.
* @param name of the parameter.
* @return the trimmed (not the original) value.
* @throws NullPointerException if value is null.
* @throws IllegalArgumentException if the trimmed value is empty.
*/
public static String checkNonEmptyAfterTrim(final String value, final String name) {
String trimmed = checkNotNull(value, name).trim();
return checkNonEmpty(trimmed, name);
}
/**
* Resolves a possibly null Integer to a primitive int, using a default value.
* @param wrapper the wrapper
* @param defaultValue the default value
* @return the primitive value
*/
public static int intValue(Integer wrapper, int defaultValue) {
return wrapper != null ? wrapper : defaultValue;
}
/**
* Resolves a possibly null Long to a primitive long, using a default value.
* @param wrapper the wrapper
* @param defaultValue the default value
* @return the primitive value
*/
public static long longValue(Long wrapper, long defaultValue) {
return wrapper != null ? wrapper : defaultValue;
}
}
| ObjectUtil |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/target/PoolingConfig.java | {
"start": 681,
"end": 784
} | interface ____ a pooling target source.
*
* @author Rod Johnson
* @author Juergen Hoeller
*/
public | for |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/process/RepeatedRowData.java | {
"start": 1361,
"end": 3998
} | class ____ implements RowData {
private final int count;
private RowData row;
public RepeatedRowData(int count) {
this.count = count;
}
/**
* Replaces the {@link RowData} backing this {@link RepeatedRowData}.
*
* <p>This method replaces the backing rows in place and does not return a new object. This is
* done for performance reasons.
*/
public RepeatedRowData replace(RowData row) {
this.row = row;
return this;
}
@Override
public int getArity() {
return row.getArity() * count;
}
@Override
public RowKind getRowKind() {
return row.getRowKind();
}
@Override
public void setRowKind(RowKind kind) {
row.setRowKind(kind);
}
@Override
public boolean isNullAt(int pos) {
return row.isNullAt(pos / count);
}
@Override
public boolean getBoolean(int pos) {
return row.getBoolean(pos / count);
}
@Override
public byte getByte(int pos) {
return row.getByte(pos / count);
}
@Override
public short getShort(int pos) {
return row.getShort(pos / count);
}
@Override
public int getInt(int pos) {
return row.getInt(pos / count);
}
@Override
public long getLong(int pos) {
return row.getLong(pos / count);
}
@Override
public float getFloat(int pos) {
return row.getFloat(pos / count);
}
@Override
public double getDouble(int pos) {
return row.getDouble(pos / count);
}
@Override
public StringData getString(int pos) {
return row.getString(pos / count);
}
@Override
public DecimalData getDecimal(int pos, int precision, int scale) {
return row.getDecimal(pos / count, precision, scale);
}
@Override
public TimestampData getTimestamp(int pos, int precision) {
return row.getTimestamp(pos / count, precision);
}
@Override
public <T> RawValueData<T> getRawValue(int pos) {
return row.getRawValue(pos / count);
}
@Override
public byte[] getBinary(int pos) {
return row.getBinary(pos / count);
}
@Override
public ArrayData getArray(int pos) {
return row.getArray(pos / count);
}
@Override
public MapData getMap(int pos) {
return row.getMap(pos / count);
}
@Override
public RowData getRow(int pos, int numFields) {
return row.getRow(pos / count, numFields);
}
@Override
public Variant getVariant(int pos) {
return row.getVariant(pos / count);
}
}
| RepeatedRowData |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/EsqlProjectSerializationTests.java | {
"start": 654,
"end": 1628
} | class ____ extends AbstractLogicalPlanSerializationTests<EsqlProject> {
@Override
protected EsqlProject createTestInstance() {
LogicalPlan child = randomChild(0);
List<Attribute> projections = randomFieldAttributes(1, 10, false);
return new EsqlProject(randomSource(), child, projections);
}
@Override
protected EsqlProject mutateInstance(EsqlProject instance) throws IOException {
LogicalPlan child = instance.child();
List<? extends NamedExpression> projections = instance.projections();
if (randomBoolean()) {
child = randomValueOtherThan(child, () -> randomChild(0));
} else {
projections = randomValueOtherThan(projections, () -> randomFieldAttributes(1, 10, false));
}
return new EsqlProject(instance.source(), child, projections);
}
@Override
protected boolean alwaysEmptySource() {
return true;
}
}
| EsqlProjectSerializationTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/android/FragmentNotInstantiableTest.java | {
"start": 3263,
"end": 3481
} | class ____ extends android.support.v4.app.Fragment {
public ParentFragmentV4() {}
}
// BUG: Diagnostic contains: nullary constructor
public static | ParentFragmentV4 |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/tools/picocli/CommandLine.java | {
"start": 271581,
"end": 274208
} | enum ____
* names when {@code "bg_"} is prepended, or it may be one of the indexed colors in the 256 color palette.
* @param str the case-insensitive style markup to convert, e.g. {@code "blue"} or {@code "bg_blue"},
* or {@code "46"} (indexed color) or {@code "0;5;0"} (RGB components of an indexed color)
* @return the IStyle for the specified converter
*/
public static IStyle bg(final String str) {
try {
return Style.valueOf(toRootLowerCase(str));
} catch (final Exception ignored) {
}
try {
return Style.valueOf("bg_" + toRootLowerCase(str));
} catch (final Exception ignored) {
}
return new Palette256Color(false, str);
}
/** Parses the specified comma-separated sequence of style descriptors and returns the associated
* styles. For each markup, strings starting with {@code "bg("} are delegated to
* {@link #bg(String)}, others are delegated to {@link #bg(String)}.
* @param commaSeparatedCodes one or more descriptors, e.g. {@code "bg(blue),underline,red"}
* @return an array with all styles for the specified descriptors
*/
public static IStyle[] parse(final String commaSeparatedCodes) {
final String[] codes = commaSeparatedCodes.split(",");
final IStyle[] styles = new IStyle[codes.length];
for (int i = 0; i < codes.length; ++i) {
if (toRootLowerCase(codes[i]).startsWith("fg(")) {
final int end = codes[i].indexOf(')');
styles[i] = Style.fg(codes[i].substring(3, end < 0 ? codes[i].length() : end));
} else if (toRootLowerCase(codes[i]).startsWith("bg(")) {
final int end = codes[i].indexOf(')');
styles[i] = Style.bg(codes[i].substring(3, end < 0 ? codes[i].length() : end));
} else {
styles[i] = Style.fg(codes[i]);
}
}
return styles;
}
}
/** Defines a palette map of 216 colors: 6 * 6 * 6 cube (216 colors):
* 16 + 36 * r + 6 * g + b (0 <= r, g, b <= 5). */
static | value |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/support/ModifierSupportTests.java | {
"start": 7267,
"end": 7334
} | class ____ {
static void staticMethod() {
}
}
final | StaticClass |
java | spring-projects__spring-boot | module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/metrics/ConnectionPoolMetrics.java | {
"start": 1181,
"end": 3166
} | class ____ implements MeterBinder {
private static final String CONNECTIONS = "connections";
private final ConnectionPool pool;
private final Iterable<Tag> tags;
public ConnectionPoolMetrics(ConnectionPool pool, String name, Iterable<Tag> tags) {
this.pool = pool;
this.tags = Tags.concat(tags, "name", name);
}
@Override
public void bindTo(MeterRegistry registry) {
this.pool.getMetrics().ifPresent((poolMetrics) -> {
bindConnectionPoolMetric(registry,
Gauge.builder(metricKey("acquired"), poolMetrics, PoolMetrics::acquiredSize)
.description("Size of successfully acquired connections which are in active use."));
bindConnectionPoolMetric(registry,
Gauge.builder(metricKey("allocated"), poolMetrics, PoolMetrics::allocatedSize)
.description("Size of allocated connections in the pool which are in active use or idle."));
bindConnectionPoolMetric(registry, Gauge.builder(metricKey("idle"), poolMetrics, PoolMetrics::idleSize)
.description("Size of idle connections in the pool."));
bindConnectionPoolMetric(registry,
Gauge.builder(metricKey("pending"), poolMetrics, PoolMetrics::pendingAcquireSize)
.description("Size of pending to acquire connections from the underlying connection factory."));
bindConnectionPoolMetric(registry,
Gauge.builder(metricKey("max.allocated"), poolMetrics, PoolMetrics::getMaxAllocatedSize)
.description("Maximum size of allocated connections that this pool allows."));
bindConnectionPoolMetric(registry,
Gauge.builder(metricKey("max.pending"), poolMetrics, PoolMetrics::getMaxPendingAcquireSize)
.description("Maximum size of pending state to acquire connections that this pool allows."));
});
}
private void bindConnectionPoolMetric(MeterRegistry registry, Builder<?> builder) {
builder.tags(this.tags).baseUnit(CONNECTIONS).register(registry);
}
private static String metricKey(String name) {
return "r2dbc.pool." + name;
}
}
| ConnectionPoolMetrics |
java | elastic__elasticsearch | x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithWorkflowsRestrictionRestIT.java | {
"start": 560,
"end": 2474
} | class ____ extends SecurityOnTrialLicenseRestTestCase {
public void testCreateRoleWithWorkflowsRestrictionFail() {
Request request = roleRequest("""
{
"cluster": ["all"],
"indices": [
{
"names": ["index-a"],
"privileges": ["all"]
}
],
"restriction":{
"workflows": ["foo", "bar"]
}
}""", "role_with_restriction");
ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(request));
assertEquals(400, e.getResponse().getStatusLine().getStatusCode());
assertThat(e.getMessage(), containsString("failed to parse role [role_with_restriction]. unexpected field [restriction]"));
}
public void testUpdateRoleWithWorkflowsRestrictionFail() throws IOException {
upsertRole("""
{
"cluster": ["all"],
"indices": [
{
"names": ["index-a"],
"privileges": ["all"]
}
]
}""", "my_role");
Request updateRoleRequest = roleRequest("""
{
"cluster": ["all"],
"indices": [
{
"names": ["index-*"],
"privileges": ["all"]
}
],
"restriction":{
"workflows": ["foo", "bar"]
}
}""", "my_role");
ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(updateRoleRequest));
assertEquals(400, e.getResponse().getStatusLine().getStatusCode());
assertThat(e.getMessage(), containsString("failed to parse role [my_role]. unexpected field [restriction]"));
}
}
| RoleWithWorkflowsRestrictionRestIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertNotEmpty_Test.java | {
"start": 1546,
"end": 2267
} | class ____ extends FloatArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertNotEmpty(someInfo(), null))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_empty() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> arrays.assertNotEmpty(info, emptyArray()));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldNotBeEmpty());
}
@Test
void should_pass_if_actual_is_not_empty() {
arrays.assertNotEmpty(someInfo(), arrayOf(8f));
}
}
| FloatArrays_assertNotEmpty_Test |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/BatchConsumerPooledExchangeTest.java | {
"start": 1692,
"end": 5170
} | class ____ extends ContextTestSupport {
private final AtomicInteger counter = new AtomicInteger();
private final AtomicReference<Exchange> ref = new AtomicReference<>();
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
ExtendedCamelContext ecc = camelContext.getCamelContextExtension();
ecc.setExchangeFactory(new PooledExchangeFactory());
ecc.setProcessorExchangeFactory(new PooledProcessorExchangeFactory());
ecc.getExchangeFactory().setStatisticsEnabled(true);
ecc.getProcessorExchangeFactory().setStatisticsEnabled(true);
return camelContext;
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
template.sendBodyAndHeader(fileUri(), "aaa", Exchange.FILE_NAME, "aaa.BatchConsumerPooledExchangeTest.txt");
template.sendBodyAndHeader(fileUri(), "bbb", Exchange.FILE_NAME, "bbb.BatchConsumerPooledExchangeTest.txt");
template.sendBodyAndHeader(fileUri(), "ccc", Exchange.FILE_NAME, "ccc.BatchConsumerPooledExchangeTest.txt");
}
@Test
public void testNotSameExchange() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(3);
mock.expectedPropertyValuesReceivedInAnyOrder("myprop", 1, 3, 5);
mock.expectedHeaderValuesReceivedInAnyOrder("myheader", 2, 4, 6);
mock.message(0).header("first").isEqualTo(true);
mock.message(1).header("first").isNull();
mock.message(2).header("first").isNull();
context.getRouteController().startAllRoutes();
assertMockEndpointsSatisfied();
Awaitility.waitAtMost(2, TimeUnit.SECONDS).untilAsserted(() -> {
PooledObjectFactory.Statistics stat
= context.getCamelContextExtension().getExchangeFactoryManager().getStatistics();
assertEquals(1, stat.getCreatedCounter());
assertEquals(2, stat.getAcquiredCounter());
assertEquals(3, stat.getReleasedCounter());
assertEquals(0, stat.getDiscardedCounter());
});
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// maxMessagesPerPoll=1 to force polling 3 times to use pooled exchanges
from(fileUri("?initialDelay=0&delay=10&maxMessagesPerPoll=1")).autoStartup(false)
.setProperty("myprop", counter::incrementAndGet)
.setHeader("myheader", counter::incrementAndGet)
.process(new Processor() {
@Override
public void process(Exchange exchange) {
// should be same exchange instance as its pooled
Exchange old = ref.get();
if (old == null) {
ref.set(exchange);
exchange.getMessage().setHeader("first", true);
} else {
assertSame(old, exchange);
}
}
})
.to("mock:result");
}
};
}
}
| BatchConsumerPooledExchangeTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/policy/PriorityUtilizationQueueOrderingPolicy.java | {
"start": 3957,
"end": 7403
} | class ____
implements Comparator<PriorityQueueResourcesForSorting> {
final private String partition;
public PriorityQueueComparator(String partition) {
this.partition = partition;
}
@Override
public int compare(PriorityQueueResourcesForSorting q1Sort,
PriorityQueueResourcesForSorting q2Sort) {
int rc = compareQueueAccessToPartition(
q1Sort.nodeLabelAccessible,
q2Sort.nodeLabelAccessible);
if (0 != rc) {
return rc;
}
float q1AbsCapacity = q1Sort.absoluteCapacity;
float q2AbsCapacity = q2Sort.absoluteCapacity;
//If q1's abs capacity > 0 and q2 is 0, then prioritize q1
if (Float.compare(q1AbsCapacity, 0f) > 0 && Float.compare(q2AbsCapacity,
0f) == 0) {
return -1;
//If q2's abs capacity > 0 and q1 is 0, then prioritize q2
} else if (Float.compare(q2AbsCapacity, 0f) > 0 && Float.compare(
q1AbsCapacity, 0f) == 0) {
return 1;
} else if (Float.compare(q1AbsCapacity, 0f) == 0 && Float.compare(
q2AbsCapacity, 0f) == 0) {
// both q1 has 0 and q2 has 0 capacity, then fall back to using
// priority, abs used capacity to prioritize
float used1 = q1Sort.absoluteUsedCapacity;
float used2 = q2Sort.absoluteUsedCapacity;
return compare(q1Sort, q2Sort, used1, used2,
q1Sort.priority.
getPriority(), q2Sort.priority.getPriority());
} else{
// both q1 has positive abs capacity and q2 has positive abs
// capacity
float used1 = q1Sort.usedCapacity;
float used2 = q2Sort.usedCapacity;
return compare(q1Sort, q2Sort, used1, used2,
q1Sort.priority.getPriority(),
q2Sort.priority.getPriority());
}
}
private int compare(PriorityQueueResourcesForSorting q1Sort,
PriorityQueueResourcesForSorting q2Sort, float q1Used,
float q2Used, int q1Prior, int q2Prior) {
int p1 = 0;
int p2 = 0;
if (respectPriority) {
p1 = q1Prior;
p2 = q2Prior;
}
int rc = PriorityUtilizationQueueOrderingPolicy.compare(q1Used, q2Used,
p1, p2);
// For queue with same used ratio / priority, queue with higher configured
// capacity goes first
if (0 == rc) {
Resource minEffRes1 =
q1Sort.configuredMinResource;
Resource minEffRes2 =
q2Sort.configuredMinResource;
if (!minEffRes1.equals(Resources.none()) || !minEffRes2.equals(
Resources.none())) {
return minEffRes2.compareTo(minEffRes1);
}
float abs1 = q1Sort.absoluteCapacity;
float abs2 = q2Sort.absoluteCapacity;
return Float.compare(abs2, abs1);
}
return rc;
}
private int compareQueueAccessToPartition(boolean q1Accessible, boolean q2Accessible) {
// Everybody has access to default partition
if (StringUtils.equals(partition, RMNodeLabelsManager.NO_LABEL)) {
return 0;
}
/*
* Check accessible to given partition, if one queue accessible and
* the other not, accessible queue goes first.
*/
if (q1Accessible && !q2Accessible) {
return -1;
} else if (!q1Accessible && q2Accessible) {
return 1;
}
return 0;
}
}
/**
* A simple storage | PriorityQueueComparator |
java | playframework__playframework | core/play/src/main/java/play/mvc/WebSocket.java | {
"start": 5385,
"end": 8263
} | class ____<In, Out> {
private final PartialFunction<Message, F.Either<In, Message>> inMapper;
private final Function<Out, Message> outMapper;
public MappedWebSocketAcceptor(
PartialFunction<Message, F.Either<In, Message>> inMapper,
Function<Out, Message> outMapper) {
this.inMapper = inMapper;
this.outMapper = outMapper;
}
/**
* Accept a WebSocket.
*
* @param f A function that takes the request header, and returns a future of either the result
* to reject the WebSocket connection with, or a flow to handle the WebSocket messages.
* @return The WebSocket handler.
*/
public WebSocket acceptOrResult(
Function<Http.RequestHeader, CompletionStage<F.Either<Result, Flow<In, Out, ?>>>> f) {
return WebSocket.acceptOrResult(inMapper, f, outMapper);
}
/**
* Accept a WebSocket.
*
* @param f A function that takes the request header, and returns a flow to handle the WebSocket
* messages.
* @return The WebSocket handler.
*/
public WebSocket accept(Function<Http.RequestHeader, Flow<In, Out, ?>> f) {
return acceptOrResult(
request -> CompletableFuture.completedFuture(F.Either.Right(f.apply(request))));
}
}
/**
* Helper to create handlers for WebSockets.
*
* @param inMapper Function to map input messages. If it produces left, the message will be passed
* to the WebSocket flow, if it produces right, the message will be sent back out to the
* client - this can be used to send errors directly to the client.
* @param f The function to handle the WebSocket.
* @param outMapper Function to map output messages.
* @return The WebSocket handler.
*/
private static <In, Out> WebSocket acceptOrResult(
PartialFunction<Message, F.Either<In, Message>> inMapper,
Function<Http.RequestHeader, CompletionStage<F.Either<Result, Flow<In, Out, ?>>>> f,
Function<Out, Message> outMapper) {
return new WebSocket() {
@Override
public CompletionStage<F.Either<Result, Flow<Message, Message, ?>>> apply(
Http.RequestHeader request) {
return f.apply(request)
.thenApply(
resultOrFlow -> {
if (resultOrFlow.left.isPresent()) {
return F.Either.Left(resultOrFlow.left.get());
} else {
Flow<Message, Message, ?> flow =
PekkoStreams.bypassWith(
Flow.<Message>create().collect(inMapper),
play.api.libs.streams.PekkoStreams.onlyFirstCanFinishMerge(2),
resultOrFlow.right.get().map(outMapper::apply));
return F.Either.Right(flow);
}
});
}
};
}
}
| MappedWebSocketAcceptor |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/TtlAggregatingStateTestContext.java | {
"start": 1328,
"end": 3785
} | class ____
extends TtlMergingStateTestContext.TtlIntegerMergingStateTestContext<
TtlAggregatingState<?, String, Integer, Long, String>, Integer, String> {
private static final long DEFAULT_ACCUMULATOR = 3L;
@Override
void initTestValues() {
updateEmpty = 5;
updateUnexpired = 7;
updateExpired = 6;
getUpdateEmpty = "8";
getUnexpired = "15";
getUpdateExpired = "9";
}
@SuppressWarnings("unchecked")
@Override
public <US extends State, SV> StateDescriptor<US, SV> createStateDescriptor() {
return (StateDescriptor<US, SV>)
new AggregatingStateDescriptor<>(getName(), AGGREGATE, LongSerializer.INSTANCE);
}
@Override
public void update(Integer value) throws Exception {
ttlState.add(value);
}
@Override
public String get() throws Exception {
return ttlState.get();
}
@Override
public Object getOriginal() throws Exception {
return ttlState.original.get();
}
@Override
String getMergeResult(
List<Tuple2<String, Integer>> unexpiredUpdatesToMerge,
List<Tuple2<String, Integer>> finalUpdatesToMerge) {
Set<String> namespaces = new HashSet<>();
unexpiredUpdatesToMerge.forEach(t -> namespaces.add(t.f0));
finalUpdatesToMerge.forEach(t -> namespaces.add(t.f0));
return Integer.toString(
getIntegerMergeResult(unexpiredUpdatesToMerge, finalUpdatesToMerge)
+ namespaces.size() * (int) DEFAULT_ACCUMULATOR);
}
private static final AggregateFunction<Integer, Long, String> AGGREGATE =
new AggregateFunction<Integer, Long, String>() {
private static final long serialVersionUID = 815663074737539631L;
@Override
public Long createAccumulator() {
return DEFAULT_ACCUMULATOR;
}
@Override
public Long add(Integer value, Long accumulator) {
return accumulator + value;
}
@Override
public String getResult(Long accumulator) {
return accumulator.toString();
}
@Override
public Long merge(Long a, Long b) {
return a + b;
}
};
}
| TtlAggregatingStateTestContext |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java | {
"start": 5396,
"end": 6162
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory str;
private final EvalOperator.ExpressionEvaluator.Factory suffix;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str,
EvalOperator.ExpressionEvaluator.Factory suffix) {
this.source = source;
this.str = str;
this.suffix = suffix;
}
@Override
public EndsWithEvaluator get(DriverContext context) {
return new EndsWithEvaluator(source, str.get(context), suffix.get(context), context);
}
@Override
public String toString() {
return "EndsWithEvaluator[" + "str=" + str + ", suffix=" + suffix + "]";
}
}
}
| Factory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/ProjectStateObserver.java | {
"start": 1012,
"end": 3036
} | class ____ {
private final ProjectId projectId;
private final ClusterStateObserver clusterObserver;
public ProjectStateObserver(
ProjectState initialState,
ClusterService clusterService,
@Nullable TimeValue timeout,
Logger logger,
ThreadContext contextHolder
) {
this(initialState.projectId(), initialState.cluster(), clusterService, timeout, logger, contextHolder);
}
private ProjectStateObserver(
ProjectId projectId,
ClusterState initialClusterState,
ClusterService clusterService,
@Nullable TimeValue timeout,
Logger logger,
ThreadContext contextHolder
) {
this.projectId = projectId;
this.clusterObserver = new ClusterStateObserver(initialClusterState, clusterService, timeout, logger, contextHolder);
}
public void waitForNextChange(ProjectStateObserver.Listener listener, @Nullable TimeValue timeOutValue) {
this.clusterObserver.waitForNextChange(new ListenerAdapter(listener), timeOutValue);
}
public boolean isTimedOut() {
return clusterObserver.isTimedOut();
}
/**
* Obtain the most recent {@link ClusterApplierService#state() applied cluster state} and then invoke either
* {@link ProjectStateObserver.Listener#onProjectStateChange(ProjectState)} or
* {@link ProjectStateObserver.Listener#onProjectMissing(ProjectId, ClusterState)} on the provided {@code listener}
*/
public void observeLastAppliedState(ProjectStateObserver.Listener listener) {
applyProjectState(clusterObserver.setAndGetObservedState(), listener);
}
private void applyProjectState(ClusterState clusterState, ProjectStateObserver.Listener listener) {
if (clusterState.metadata().hasProject(projectId)) {
listener.onProjectStateChange(clusterState.projectState(projectId));
} else {
listener.onProjectMissing(projectId, clusterState);
}
}
public | ProjectStateObserver |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.