repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/repository/package-info.java | fullstack/src/main/java/org/ehcache/sample/repository/package-info.java | /**
* Spring Data JPA repositories.
*/
package org.ehcache.sample.repository;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/repository/CustomAuditEventRepository.java | fullstack/src/main/java/org/ehcache/sample/repository/CustomAuditEventRepository.java | package org.ehcache.sample.repository;
import org.ehcache.sample.config.Constants;
import org.ehcache.sample.config.audit.AuditEventConverter;
import org.ehcache.sample.domain.PersistentAuditEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.audit.AuditEvent;
import org.springframework.boot.actuate.audit.AuditEventRepository;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.util.*;
/**
 * An implementation of Spring Boot's AuditEventRepository backed by JPA.
 * <p>
 * Persists audit events, skipping authorization failures and anonymous users,
 * and truncating event data that would overflow the database column.
 */
@Repository
public class CustomAuditEventRepository implements AuditEventRepository {

    private static final String AUTHORIZATION_FAILURE = "AUTHORIZATION_FAILURE";

    /**
     * Should be the same as in Liquibase migration.
     */
    protected static final int EVENT_DATA_COLUMN_MAX_LENGTH = 255;

    private final PersistenceAuditEventRepository persistenceAuditEventRepository;

    private final AuditEventConverter auditEventConverter;

    private final Logger log = LoggerFactory.getLogger(getClass());

    public CustomAuditEventRepository(PersistenceAuditEventRepository persistenceAuditEventRepository,
                                      AuditEventConverter auditEventConverter) {
        this.persistenceAuditEventRepository = persistenceAuditEventRepository;
        this.auditEventConverter = auditEventConverter;
    }

    /**
     * Finds persisted audit events for a principal, after a given instant, of a given type.
     */
    @Override
    public List<AuditEvent> find(String principal, Instant after, String type) {
        Iterable<PersistentAuditEvent> events =
            persistenceAuditEventRepository.findByPrincipalAndAuditEventDateAfterAndAuditEventType(principal, after, type);
        return auditEventConverter.convertToAuditEvent(events);
    }

    /**
     * Stores the audit event in its own transaction, ignoring authorization
     * failures and events raised by the anonymous user.
     */
    @Override
    @Transactional(propagation = Propagation.REQUIRES_NEW)
    public void add(AuditEvent event) {
        // Same filter as the original condition, rewritten as a guard clause.
        if (AUTHORIZATION_FAILURE.equals(event.getType())
                || Constants.ANONYMOUS_USER.equals(event.getPrincipal())) {
            return;
        }
        PersistentAuditEvent persistentAuditEvent = new PersistentAuditEvent();
        persistentAuditEvent.setPrincipal(event.getPrincipal());
        persistentAuditEvent.setAuditEventType(event.getType());
        persistentAuditEvent.setAuditEventDate(event.getTimestamp());
        Map<String, String> eventData = auditEventConverter.convertDataToStrings(event.getData());
        persistentAuditEvent.setData(truncate(eventData));
        persistenceAuditEventRepository.save(persistentAuditEvent);
    }

    /**
     * Truncate event data that might exceed column length.
     * Null values are kept as-is; a warning is logged for each truncation.
     */
    private Map<String, String> truncate(Map<String, String> data) {
        Map<String, String> truncated = new HashMap<>();
        if (data == null) {
            return truncated;
        }
        data.forEach((key, value) -> {
            if (value != null && value.length() > EVENT_DATA_COLUMN_MAX_LENGTH) {
                int originalLength = value.length();
                value = value.substring(0, EVENT_DATA_COLUMN_MAX_LENGTH);
                log.warn("Event data for {} too long ({}) has been truncated to {}. Consider increasing column width.",
                    key, originalLength, EVENT_DATA_COLUMN_MAX_LENGTH);
            }
            truncated.put(key, value);
        });
        return truncated;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/repository/PersistenceAuditEventRepository.java | fullstack/src/main/java/org/ehcache/sample/repository/PersistenceAuditEventRepository.java | package org.ehcache.sample.repository;
import org.ehcache.sample.domain.PersistentAuditEvent;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import java.time.Instant;
import java.util.List;
/**
 * Spring Data JPA repository for the PersistentAuditEvent entity.
 * All methods are Spring Data derived queries; the implementation is generated.
 */
public interface PersistenceAuditEventRepository extends JpaRepository<PersistentAuditEvent, Long> {
// All events recorded for the given principal.
List<PersistentAuditEvent> findByPrincipal(String principal);
// All events strictly after the given instant.
List<PersistentAuditEvent> findByAuditEventDateAfter(Instant after);
// Events for the principal strictly after the given instant.
List<PersistentAuditEvent> findByPrincipalAndAuditEventDateAfter(String principal, Instant after);
// Events for the principal, after the given instant, with the given event type.
List<PersistentAuditEvent> findByPrincipalAndAuditEventDateAfterAndAuditEventType(String principal, Instant after, String type);
// Page of events whose date lies between the two instants.
Page<PersistentAuditEvent> findAllByAuditEventDateBetween(Instant fromDate, Instant toDate, Pageable pageable);
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/repository/AuthorityRepository.java | fullstack/src/main/java/org/ehcache/sample/repository/AuthorityRepository.java | package org.ehcache.sample.repository;
import org.ehcache.sample.domain.Authority;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for the Authority entity.
 * Marker interface: inherits all CRUD operations from JpaRepository;
 * the Authority entity is keyed by its String name.
 */
public interface AuthorityRepository extends JpaRepository<Authority, String> {
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/package-info.java | fullstack/src/main/java/org/ehcache/sample/security/package-info.java | /**
* Spring Security configuration.
*/
package org.ehcache.sample.security;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/SpringSecurityAuditorAware.java | fullstack/src/main/java/org/ehcache/sample/security/SpringSecurityAuditorAware.java | package org.ehcache.sample.security;
import org.ehcache.sample.config.Constants;
import java.util.Optional;
import org.springframework.data.domain.AuditorAware;
import org.springframework.stereotype.Component;
/**
 * Implementation of AuditorAware based on Spring Security.
 * Supplies the current user's login — or the system account when no user
 * is authenticated — for JPA auditing (created-by / modified-by fields).
 */
@Component
public class SpringSecurityAuditorAware implements AuditorAware<String> {

    @Override
    public Optional<String> getCurrentAuditor() {
        String auditor = SecurityUtils.getCurrentUserLogin().orElse(Constants.SYSTEM_ACCOUNT);
        return Optional.of(auditor);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/UserNotActivatedException.java | fullstack/src/main/java/org/ehcache/sample/security/UserNotActivatedException.java | package org.ehcache.sample.security;
import org.springframework.security.core.AuthenticationException;
/**
 * This exception is thrown in case of a not activated user trying to authenticate.
 */
public class UserNotActivatedException extends AuthenticationException {
private static final long serialVersionUID = 1L;
/**
 * Creates the exception with a detail message.
 *
 * @param message description of the rejected authentication attempt
 */
public UserNotActivatedException(String message) {
super(message);
}
/**
 * Creates the exception with a detail message and a root cause.
 *
 * @param message description of the rejected authentication attempt
 * @param t the underlying cause
 */
public UserNotActivatedException(String message, Throwable t) {
super(message, t);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/SecurityUtils.java | fullstack/src/main/java/org/ehcache/sample/security/SecurityUtils.java | package org.ehcache.sample.security;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Optional;
/**
* Utility class for Spring Security.
*/
public final class SecurityUtils {
private SecurityUtils() {
}
/**
* Get the login of the current user.
*
* @return the login of the current user
*/
public static Optional<String> getCurrentUserLogin() {
SecurityContext securityContext = SecurityContextHolder.getContext();
return Optional.ofNullable(securityContext.getAuthentication())
.map(authentication -> {
if (authentication.getPrincipal() instanceof UserDetails) {
UserDetails springSecurityUser = (UserDetails) authentication.getPrincipal();
return springSecurityUser.getUsername();
} else if (authentication.getPrincipal() instanceof String) {
return (String) authentication.getPrincipal();
}
return null;
});
}
/**
* Get the JWT of the current user.
*
* @return the JWT of the current user
*/
public static Optional<String> getCurrentUserJWT() {
SecurityContext securityContext = SecurityContextHolder.getContext();
return Optional.ofNullable(securityContext.getAuthentication())
.filter(authentication -> authentication.getCredentials() instanceof String)
.map(authentication -> (String) authentication.getCredentials());
}
/**
* Check if a user is authenticated.
*
* @return true if the user is authenticated, false otherwise
*/
public static boolean isAuthenticated() {
SecurityContext securityContext = SecurityContextHolder.getContext();
return Optional.ofNullable(securityContext.getAuthentication())
.map(authentication -> authentication.getAuthorities().stream()
.noneMatch(grantedAuthority -> grantedAuthority.getAuthority().equals(AuthoritiesConstants.ANONYMOUS)))
.orElse(false);
}
/**
* If the current user has a specific authority (security role).
* <p>
* The name of this method comes from the isUserInRole() method in the Servlet API
*
* @param authority the authority to check
* @return true if the current user has the authority, false otherwise
*/
public static boolean isCurrentUserInRole(String authority) {
SecurityContext securityContext = SecurityContextHolder.getContext();
return Optional.ofNullable(securityContext.getAuthentication())
.map(authentication -> authentication.getAuthorities().stream()
.anyMatch(grantedAuthority -> grantedAuthority.getAuthority().equals(authority)))
.orElse(false);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/AuthoritiesConstants.java | fullstack/src/main/java/org/ehcache/sample/security/AuthoritiesConstants.java | package org.ehcache.sample.security;
/**
 * Constants for Spring Security authorities.
 */
public final class AuthoritiesConstants {
// Role granted to administrators.
public static final String ADMIN = "ROLE_ADMIN";
// Role granted to regular authenticated users.
public static final String USER = "ROLE_USER";
// Pseudo-role representing unauthenticated (anonymous) access.
public static final String ANONYMOUS = "ROLE_ANONYMOUS";
// Utility holder: not meant to be instantiated.
private AuthoritiesConstants() {
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/DomainUserDetailsService.java | fullstack/src/main/java/org/ehcache/sample/security/DomainUserDetailsService.java | package org.ehcache.sample.security;
import org.ehcache.sample.domain.User;
import org.ehcache.sample.repository.UserRepository;
import org.hibernate.validator.internal.constraintvalidators.hv.EmailValidator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Authenticate a user from the database.
 * Logins that look like email addresses are resolved by email; all others
 * by their lower-cased login name.
 */
@Component("userDetailsService")
public class DomainUserDetailsService implements UserDetailsService {

    private final Logger log = LoggerFactory.getLogger(DomainUserDetailsService.class);

    private final UserRepository userRepository;

    public DomainUserDetailsService(UserRepository userRepository) {
        this.userRepository = userRepository;
    }

    @Override
    @Transactional
    public UserDetails loadUserByUsername(final String login) {
        log.debug("Authenticating {}", login);

        boolean looksLikeEmail = new EmailValidator().isValid(login, null);
        if (looksLikeEmail) {
            Optional<User> byEmail = userRepository.findOneWithAuthoritiesByEmail(login);
            return byEmail
                .map(user -> createSpringSecurityUser(login, user))
                .orElseThrow(() -> new UsernameNotFoundException("User with email " + login + " was not found in the database"));
        }

        String lowercaseLogin = login.toLowerCase(Locale.ENGLISH);
        Optional<User> byLogin = userRepository.findOneWithAuthoritiesByLogin(lowercaseLogin);
        return byLogin
            .map(user -> createSpringSecurityUser(lowercaseLogin, user))
            .orElseThrow(() -> new UsernameNotFoundException("User " + lowercaseLogin + " was not found in the database"));
    }

    // Maps a domain User to Spring Security's UserDetails, rejecting accounts
    // that have not been activated yet.
    private org.springframework.security.core.userdetails.User createSpringSecurityUser(String lowercaseLogin, User user) {
        if (!user.getActivated()) {
            throw new UserNotActivatedException("User " + lowercaseLogin + " was not activated");
        }
        List<GrantedAuthority> grantedAuthorities = new ArrayList<>();
        user.getAuthorities().forEach(authority ->
            grantedAuthorities.add(new SimpleGrantedAuthority(authority.getName())));
        return new org.springframework.security.core.userdetails.User(user.getLogin(),
            user.getPassword(),
            grantedAuthorities);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/jwt/TokenProvider.java | fullstack/src/main/java/org/ehcache/sample/security/jwt/TokenProvider.java | package org.ehcache.sample.security.jwt;
import java.nio.charset.StandardCharsets;
import java.security.Key;
import java.util.*;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import io.github.jhipster.config.JHipsterProperties;
import io.jsonwebtoken.*;
import io.jsonwebtoken.io.Decoders;
import io.jsonwebtoken.security.Keys;
/**
 * Creates, parses and validates the application's JWT tokens (HS512-signed).
 */
@Component
public class TokenProvider {

    private final Logger log = LoggerFactory.getLogger(TokenProvider.class);

    private static final String AUTHORITIES_KEY = "auth";

    // Assigned in init(); not final because the secret and validities come
    // from configuration that is only read after construction.
    private Key key;

    private long tokenValidityInMilliseconds;

    private long tokenValidityInMillisecondsForRememberMe;

    private final JHipsterProperties jHipsterProperties;

    public TokenProvider(JHipsterProperties jHipsterProperties) {
        this.jHipsterProperties = jHipsterProperties;
    }

    /**
     * Derives the signing key from the configured secret (raw UTF-8 bytes when
     * the legacy `secret` property is set, otherwise the Base64 secret) and
     * caches the token validity durations in milliseconds.
     */
    @PostConstruct
    public void init() {
        byte[] keyBytes;
        String secret = jHipsterProperties.getSecurity().getAuthentication().getJwt().getSecret();
        if (!StringUtils.isEmpty(secret)) {
            log.warn("Warning: the JWT key used is not Base64-encoded. " +
                "We recommend using the `jhipster.security.authentication.jwt.base64-secret` key for optimum security.");
            keyBytes = secret.getBytes(StandardCharsets.UTF_8);
        } else {
            log.debug("Using a Base64-encoded JWT secret key");
            keyBytes = Decoders.BASE64.decode(jHipsterProperties.getSecurity().getAuthentication().getJwt().getBase64Secret());
        }
        this.key = Keys.hmacShaKeyFor(keyBytes);
        this.tokenValidityInMilliseconds =
            1000 * jHipsterProperties.getSecurity().getAuthentication().getJwt().getTokenValidityInSeconds();
        this.tokenValidityInMillisecondsForRememberMe =
            1000 * jHipsterProperties.getSecurity().getAuthentication().getJwt()
                .getTokenValidityInSecondsForRememberMe();
    }

    /**
     * Creates a signed, compact JWT for the given authentication.
     *
     * @param authentication the authenticated user
     * @param rememberMe whether to use the extended "remember me" validity
     * @return the compact JWT string
     */
    public String createToken(Authentication authentication, boolean rememberMe) {
        String authorities = authentication.getAuthorities().stream()
            .map(GrantedAuthority::getAuthority)
            .collect(Collectors.joining(","));

        long now = (new Date()).getTime();
        Date validity;
        if (rememberMe) {
            validity = new Date(now + this.tokenValidityInMillisecondsForRememberMe);
        } else {
            validity = new Date(now + this.tokenValidityInMilliseconds);
        }

        return Jwts.builder()
            .setSubject(authentication.getName())
            .claim(AUTHORITIES_KEY, authorities)
            .signWith(key, SignatureAlgorithm.HS512)
            .setExpiration(validity)
            .compact();
    }

    /**
     * Rebuilds a Spring Security Authentication from a (previously validated)
     * token: the subject becomes the principal, the comma-separated "auth"
     * claim becomes the granted authorities.
     */
    public Authentication getAuthentication(String token) {
        Claims claims = Jwts.parser()
            .setSigningKey(key)
            .parseClaimsJws(token)
            .getBody();

        Collection<? extends GrantedAuthority> authorities =
            Arrays.stream(claims.get(AUTHORITIES_KEY).toString().split(","))
                .map(SimpleGrantedAuthority::new)
                .collect(Collectors.toList());

        User principal = new User(claims.getSubject(), "", authorities);

        return new UsernamePasswordAuthenticationToken(principal, token, authorities);
    }

    /**
     * Checks that the token parses, is signed with our key and has not expired.
     *
     * @param authToken the compact JWT to validate
     * @return true when the token is valid, false otherwise
     */
    public boolean validateToken(String authToken) {
        try {
            Jwts.parser().setSigningKey(key).parseClaimsJws(authToken);
            return true;
        } catch (io.jsonwebtoken.security.SecurityException | MalformedJwtException e) {
            log.info("Invalid JWT signature.");
            // Pass the exception as the bare last argument (no "{}" placeholder):
            // with a placeholder SLF4J consumes the Throwable as a format
            // parameter and the stack trace is lost.
            log.trace("Invalid JWT signature trace.", e);
        } catch (ExpiredJwtException e) {
            log.info("Expired JWT token.");
            log.trace("Expired JWT token trace.", e);
        } catch (UnsupportedJwtException e) {
            log.info("Unsupported JWT token.");
            log.trace("Unsupported JWT token trace.", e);
        } catch (IllegalArgumentException e) {
            log.info("JWT token compact of handler are invalid.");
            log.trace("JWT token compact of handler are invalid trace.", e);
        }
        return false;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/jwt/JWTConfigurer.java | fullstack/src/main/java/org/ehcache/sample/security/jwt/JWTConfigurer.java | package org.ehcache.sample.security.jwt;
import org.springframework.security.config.annotation.SecurityConfigurerAdapter;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.web.DefaultSecurityFilterChain;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
/**
 * Registers the {@link JWTFilter} in the Spring Security filter chain,
 * in front of the username/password authentication filter.
 */
public class JWTConfigurer extends SecurityConfigurerAdapter<DefaultSecurityFilterChain, HttpSecurity> {

    // Made final: the provider is injected once and never reassigned.
    private final TokenProvider tokenProvider;

    public JWTConfigurer(TokenProvider tokenProvider) {
        this.tokenProvider = tokenProvider;
    }

    @Override
    public void configure(HttpSecurity http) throws Exception {
        JWTFilter customFilter = new JWTFilter(tokenProvider);
        http.addFilterBefore(customFilter, UsernamePasswordAuthenticationFilter.class);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/security/jwt/JWTFilter.java | fullstack/src/main/java/org/ehcache/sample/security/jwt/JWTFilter.java | package org.ehcache.sample.security.jwt;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.util.StringUtils;
import org.springframework.web.filter.GenericFilterBean;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
/**
 * Filters incoming requests and installs a Spring Security principal if a header corresponding to a valid user is
 * found.
 */
public class JWTFilter extends GenericFilterBean {

    public static final String AUTHORIZATION_HEADER = "Authorization";

    // Named constant instead of the magic substring offset 7.
    private static final String BEARER_PREFIX = "Bearer ";

    // Made final: the provider is injected once and never reassigned.
    private final TokenProvider tokenProvider;

    public JWTFilter(TokenProvider tokenProvider) {
        this.tokenProvider = tokenProvider;
    }

    /**
     * Extracts and validates the bearer token; on success installs the
     * corresponding Authentication in the security context, then continues
     * the filter chain either way.
     */
    @Override
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
            throws IOException, ServletException {
        HttpServletRequest httpServletRequest = (HttpServletRequest) servletRequest;
        String jwt = resolveToken(httpServletRequest);
        if (StringUtils.hasText(jwt) && this.tokenProvider.validateToken(jwt)) {
            Authentication authentication = this.tokenProvider.getAuthentication(jwt);
            SecurityContextHolder.getContext().setAuthentication(authentication);
        }
        filterChain.doFilter(servletRequest, servletResponse);
    }

    // Returns the raw JWT from the "Authorization: Bearer ..." header, or null
    // when the header is absent or not a bearer token.
    private String resolveToken(HttpServletRequest request) {
        String bearerToken = request.getHeader(AUTHORIZATION_HEADER);
        if (StringUtils.hasText(bearerToken) && bearerToken.startsWith(BEARER_PREFIX)) {
            return bearerToken.substring(BEARER_PREFIX.length());
        }
        return null;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/SecurityConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/SecurityConfiguration.java | package org.ehcache.sample.config;
import org.ehcache.sample.security.*;
import org.ehcache.sample.security.jwt.*;
import org.springframework.beans.factory.BeanInitializationException;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.http.HttpMethod;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.web.filter.CorsFilter;
import org.zalando.problem.spring.web.advice.security.SecurityProblemSupport;
import javax.annotation.PostConstruct;
// Spring Security setup: stateless, JWT-based security with Zalando
// problem-spring-web (SecurityProblemSupport) handling authentication and
// access-denied errors.
@Configuration
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true)
@Import(SecurityProblemSupport.class)
public class SecurityConfiguration extends WebSecurityConfigurerAdapter {
private final AuthenticationManagerBuilder authenticationManagerBuilder;
private final UserDetailsService userDetailsService;
private final TokenProvider tokenProvider;
private final CorsFilter corsFilter;
private final SecurityProblemSupport problemSupport;
public SecurityConfiguration(AuthenticationManagerBuilder authenticationManagerBuilder, UserDetailsService userDetailsService, TokenProvider tokenProvider, CorsFilter corsFilter, SecurityProblemSupport problemSupport) {
this.authenticationManagerBuilder = authenticationManagerBuilder;
this.userDetailsService = userDetailsService;
this.tokenProvider = tokenProvider;
this.corsFilter = corsFilter;
this.problemSupport = problemSupport;
}
// Wires the user lookup service and password encoder into the
// authentication manager after construction; any wiring failure aborts
// application startup.
@PostConstruct
public void init() {
try {
authenticationManagerBuilder
.userDetailsService(userDetailsService)
.passwordEncoder(passwordEncoder());
} catch (Exception e) {
throw new BeanInitializationException("Security configuration failed", e);
}
}
// Exposes the AuthenticationManager as a bean so other components can
// inject it (e.g. the authentication endpoint).
@Override
@Bean
public AuthenticationManager authenticationManagerBean() throws Exception {
return super.authenticationManagerBean();
}
// BCrypt is used for password hashing.
@Bean
public PasswordEncoder passwordEncoder() {
return new BCryptPasswordEncoder();
}
// Requests matched here bypass the security filter chain entirely:
// CORS preflights, static assets, i18n bundles, the H2 console,
// Swagger UI and test endpoints.
@Override
public void configure(WebSecurity web) throws Exception {
web.ignoring()
.antMatchers(HttpMethod.OPTIONS, "/**")
.antMatchers("/app/**/*.{js,html}")
.antMatchers("/i18n/**")
.antMatchers("/content/**")
.antMatchers("/h2-console/**")
.antMatchers("/swagger-ui/index.html")
.antMatchers("/test/**");
}
// Main HTTP security chain. CSRF is disabled (stateless token API), the
// CORS filter runs before authentication, auth/access errors go to
// problemSupport, frame options are disabled (needed for the H2 console),
// no HTTP session is ever created, and the JWT filter is applied last via
// securityConfigurerAdapter(). Registration, activation, login and
// password-reset endpoints are public; all other /api/** routes require
// authentication; /management/** (beyond health/info) requires ADMIN.
@Override
public void configure(HttpSecurity http) throws Exception {
http
.csrf()
.disable()
.addFilterBefore(corsFilter, UsernamePasswordAuthenticationFilter.class)
.exceptionHandling()
.authenticationEntryPoint(problemSupport)
.accessDeniedHandler(problemSupport)
.and()
.headers()
.frameOptions()
.disable()
.and()
.sessionManagement()
.sessionCreationPolicy(SessionCreationPolicy.STATELESS)
.and()
.authorizeRequests()
.antMatchers("/api/register").permitAll()
.antMatchers("/api/activate").permitAll()
.antMatchers("/api/authenticate").permitAll()
.antMatchers("/api/account/reset-password/init").permitAll()
.antMatchers("/api/account/reset-password/finish").permitAll()
.antMatchers("/api/**").authenticated()
.antMatchers("/management/health").permitAll()
.antMatchers("/management/info").permitAll()
.antMatchers("/management/**").hasAuthority(AuthoritiesConstants.ADMIN)
.and()
.apply(securityConfigurerAdapter());
}
// Wraps the TokenProvider in the configurer that registers JWTFilter.
private JWTConfigurer securityConfigurerAdapter() {
return new JWTConfigurer(tokenProvider);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/ApplicationProperties.java | fullstack/src/main/java/org/ehcache/sample/config/ApplicationProperties.java | package org.ehcache.sample.config;
import org.ehcache.clustered.common.Consistency;
import org.springframework.boot.context.properties.ConfigurationProperties;
import java.net.URI;
/**
 * Properties specific to Demo.
 * <p>
 * Properties are configured in the application.yml file.
 * See {@link io.github.jhipster.config.JHipsterProperties} for a good example.
 */
@ConfigurationProperties(prefix = "application", ignoreUnknownFields = false)
public class ApplicationProperties {
// Bound from application.* keys via Spring Boot relaxed binding.
// NOTE(review): field semantics below are inferred from names — confirm
// against application.yml and the services that consume them.
private String biographiesRemoteLocation;
private String biographiesLocation;
private String googleApiKey;
private String darkSkyApiKey;
// When true, external web services are presumably stubbed out — verify
// against the consuming service implementations.
private boolean stubWebServices;
// Ehcache clustering (Terracotta) settings; see the nested Cluster class.
private Cluster cluster;
public String getBiographiesRemoteLocation() {
return biographiesRemoteLocation;
}
public void setBiographiesRemoteLocation(String biographiesRemoteLocation) {
this.biographiesRemoteLocation = biographiesRemoteLocation;
}
public String getBiographiesLocation() {
return biographiesLocation;
}
public void setBiographiesLocation(String biographiesLocation) {
this.biographiesLocation = biographiesLocation;
}
public String getGoogleApiKey() {
return googleApiKey;
}
public void setGoogleApiKey(String googleApiKey) {
this.googleApiKey = googleApiKey;
}
public String getDarkSkyApiKey() {
return darkSkyApiKey;
}
public void setDarkSkyApiKey(String darkSkyApiKey) {
this.darkSkyApiKey = darkSkyApiKey;
}
public boolean isStubWebServices() {
return stubWebServices;
}
public void setStubWebServices(boolean stubWebServices) {
this.stubWebServices = stubWebServices;
}
public Cluster getCluster() {
return cluster;
}
public void setCluster(Cluster cluster) {
this.cluster = cluster;
}
// Settings for connecting to a clustered Ehcache (application.cluster.*).
// Defaults below apply when the corresponding key is absent.
public static class Cluster {
// Cluster endpoint URI; no default — must be configured.
private URI uri;
// Auto-create the clustered resources on first connect (default: true).
private boolean autoCreate = true;
// Consistency level for clustered caches (default: STRONG).
private Consistency consistency = Consistency.STRONG;
// Clustered cache size in megabytes (default: 10).
private long sizeInMb = 10;
// Name of the server-side offheap resource to use (default: "offheap-1").
private String offheapResourceName = "offheap-1";
public URI getUri() {
return uri;
}
public void setUri(URI uri) {
this.uri = uri;
}
public boolean isAutoCreate() {
return autoCreate;
}
public void setAutoCreate(boolean autoCreate) {
this.autoCreate = autoCreate;
}
public Consistency getConsistency() {
return consistency;
}
public void setConsistency(Consistency consistency) {
this.consistency = consistency;
}
public long getSizeInMb() {
return sizeInMb;
}
public void setSizeInMb(long sizeInMb) {
this.sizeInMb = sizeInMb;
}
public String getOffheapResourceName() {
return offheapResourceName;
}
public void setOffheapResourceName(String offheapResourceName) {
this.offheapResourceName = offheapResourceName;
}
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/CloudDatabaseConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/CloudDatabaseConfiguration.java | package org.ehcache.sample.config;
import io.github.jhipster.config.JHipsterConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.config.java.AbstractCloudConfig;
import org.springframework.context.annotation.*;
import javax.sql.DataSource;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Datasource configuration for the "cloud" Spring profile: the JDBC
 * datasource is obtained from the cloud connector instead of local properties.
 */
@Configuration
@Profile(JHipsterConstants.SPRING_PROFILE_CLOUD)
public class CloudDatabaseConfiguration extends AbstractCloudConfig {

    private final Logger log = LoggerFactory.getLogger(CloudDatabaseConfiguration.class);

    // Fix: a constant belongs in a private static final field; the original
    // declared it as a per-instance field.
    private static final String CLOUD_CONFIGURATION_HIKARI_PREFIX = "spring.datasource.hikari";

    /**
     * Builds the cloud-provided datasource, with spring.datasource.hikari.*
     * properties bound onto it.
     */
    @Bean
    @ConfigurationProperties(CLOUD_CONFIGURATION_HIKARI_PREFIX)
    public DataSource dataSource() {
        log.info("Configuring JDBC datasource from a cloud provider");
        return connectionFactory().dataSource();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/DefaultProfileUtil.java | fullstack/src/main/java/org/ehcache/sample/config/DefaultProfileUtil.java | package org.ehcache.sample.config;
import io.github.jhipster.config.JHipsterConstants;
import org.springframework.boot.SpringApplication;
import org.springframework.core.env.Environment;
import java.util.*;
/**
 * Utility class to load a Spring profile to be used as default
 * when there is no <code>spring.profiles.active</code> set in the environment or as command line argument.
 * If the value is not available in <code>application.yml</code> then <code>dev</code> profile will be used as default.
 */
public final class DefaultProfileUtil {

    private static final String SPRING_PROFILE_DEFAULT = "spring.profiles.default";

    private DefaultProfileUtil() {
    }

    /**
     * Set a default to use when no profile is configured.
     *
     * @param app the Spring application
     */
    public static void addDefaultProfile(SpringApplication app) {
        /*
         * The default profile to use when no other profiles are defined
         * This cannot be set in the <code>application.yml</code> file.
         * See https://github.com/spring-projects/spring-boot/issues/1219
         */
        Map<String, Object> defaultProperties = new HashMap<>();
        defaultProperties.put(SPRING_PROFILE_DEFAULT, JHipsterConstants.SPRING_PROFILE_DEVELOPMENT);
        app.setDefaultProperties(defaultProperties);
    }

    /**
     * Get the profiles that are applied else get default profiles.
     *
     * @param env spring environment
     * @return profiles
     */
    public static String[] getActiveProfiles(Environment env) {
        String[] activeProfiles = env.getActiveProfiles();
        return activeProfiles.length > 0 ? activeProfiles : env.getDefaultProfiles();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/DatabaseConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/DatabaseConfiguration.java | package org.ehcache.sample.config;
import io.github.jhipster.config.JHipsterConstants;
import io.github.jhipster.config.h2.H2ConfigurationHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.Environment;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import java.sql.SQLException;
import java.lang.NumberFormatException;
@Configuration
@EnableJpaRepositories("org.ehcache.sample.repository")
@EnableJpaAuditing(auditorAwareRef = "springSecurityAuditorAware")
@EnableTransactionManagement
public class DatabaseConfiguration {

    private final Logger log = LoggerFactory.getLogger(DatabaseConfiguration.class);

    private final Environment env;
    private final CacheManager cacheManager;

    public DatabaseConfiguration(Environment env, CacheManager cacheManager) {
        this.env = env;
        this.cacheManager = cacheManager;
    }

    /**
     * Open the TCP port for the H2 database, so it is available remotely.
     *
     * @return the H2 database TCP server
     * @throws SQLException if the server failed to start
     */
    @Bean(initMethod = "start", destroyMethod = "stop")
    @Profile(JHipsterConstants.SPRING_PROFILE_DEVELOPMENT)
    public Object h2TCPServer() throws SQLException {
        String h2Port = getValidPortForH2();
        log.debug("H2 database is available on port {}", h2Port);
        return H2ConfigurationHelper.createServer(h2Port);
    }

    /**
     * Derive a TCP port for H2 from the HTTP server port: ports below 10000
     * are shifted up by 10000; others are shifted by +2000, or -2000 when
     * +2000 would exceed the valid port range.
     */
    private String getValidPortForH2() throws NumberFormatException {
        int serverPort = Integer.parseInt(env.getProperty("server.port"));
        if (serverPort < 10000) {
            return String.valueOf(10000 + serverPort);
        }
        return String.valueOf(serverPort < 63536 ? serverPort + 2000 : serverPort - 2000);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/CacheConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/CacheConfiguration.java | package org.ehcache.sample.config;
import io.github.jhipster.config.JHipsterConstants;
import io.github.jhipster.config.JHipsterProperties;
import org.ehcache.clustered.client.config.ClusteredStoreConfiguration;
import org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder;
import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder;
import org.ehcache.clustered.client.config.builders.ServerSideConfigurationBuilder;
import org.ehcache.clustered.common.Consistency;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ExpiryPolicyBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import io.github.jhipster.config.jcache.BeanClassLoaderAwareJCacheRegionFactory;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.core.config.DefaultConfiguration;
import org.ehcache.jsr107.EhcacheCachingProvider;
import org.springframework.cache.annotation.CachingConfigurerSupport;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.jcache.JCacheCacheManager;
import org.springframework.context.annotation.*;
import org.springframework.core.env.Environment;
import java.net.URI;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import javax.cache.CacheManager;
import javax.cache.Caching;
@Configuration
@EnableCaching
public class CacheConfiguration extends CachingConfigurerSupport {

    // Used to detect the active profile: "prod" selects the clustered cache manager.
    private final Environment environment;
    // JHipster cache settings: heap max entries and TTL seconds.
    private final JHipsterProperties jHipsterProperties;
    // Application settings, including the Terracotta cluster configuration.
    private final ApplicationProperties applicationProperties;

    public CacheConfiguration(Environment environment, JHipsterProperties jHipsterProperties, ApplicationProperties applicationProperties) {
        // Register the bean class loader with the Hibernate JCache region factory
        // before any cache manager is created, so cached classes resolve correctly.
        BeanClassLoaderAwareJCacheRegionFactory.setBeanClassLoader(getClassLoader());
        this.environment = environment;
        this.jHipsterProperties = jHipsterProperties;
        this.applicationProperties = applicationProperties;
    }

    /**
     * Return the class loader to use to retrieve the CacheManager from the Caching Provider
     *
     * @return class loader to use
     */
    private ClassLoader getClassLoader() {
        return this.getClass().getClassLoader();
    }

    // Obtains a JSR-107 CacheManager from the Ehcache provider at its default URI
    // with the given programmatic configuration.
    private CacheManager getCacheManager(EhcacheCachingProvider provider, DefaultConfiguration configuration) {
        return provider.getCacheManager(provider.getDefaultURI(), configuration);
    }

    /**
     * Spring CacheManager bean: wraps a clustered Ehcache manager when the
     * "prod" profile is active, an in-memory (heap only) one otherwise.
     */
    @Bean
    @Override
    public org.springframework.cache.CacheManager cacheManager() {
        return new JCacheCacheManager(environment.acceptsProfiles(JHipsterConstants.SPRING_PROFILE_PRODUCTION) ?
            createClusteredCacheManager() : createInMemoryCacheManager());
    }

    // Heap-only cache manager: one shared template configuration (max entries + TTL
    // from JHipster properties) applied to every named cache.
    private CacheManager createInMemoryCacheManager() {
        long cacheSize = jHipsterProperties.getCache().getEhcache().getMaxEntries();
        long ttl = jHipsterProperties.getCache().getEhcache().getTimeToLiveSeconds();
        org.ehcache.config.CacheConfiguration<Object, Object> cacheConfiguration = CacheConfigurationBuilder
            .newCacheConfigurationBuilder(Object.class, Object.class, ResourcePoolsBuilder
                .heap(cacheSize))
            .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(ttl)))
            .build();
        Map<String, org.ehcache.config.CacheConfiguration<?, ?>> caches = createCacheConfigurations(cacheConfiguration);
        EhcacheCachingProvider provider = getCachingProvider();
        DefaultConfiguration configuration = new DefaultConfiguration(caches, getClassLoader());
        return getCacheManager(provider, configuration);
    }

    // Clustered cache manager: heap tier plus a dedicated clustered (off-heap server)
    // tier, connected to the Terracotta server described in ApplicationProperties.
    private CacheManager createClusteredCacheManager() {
        ApplicationProperties.Cluster clusterProperties = applicationProperties.getCluster();
        URI clusterUri = clusterProperties.getUri();
        boolean autoCreate = clusterProperties.isAutoCreate();
        long clusteredCacheSize = clusterProperties.getSizeInMb();
        String offheapResourceName = clusterProperties.getOffheapResourceName();
        Consistency consistency = clusterProperties.getConsistency();
        long heapCacheSize = jHipsterProperties.getCache().getEhcache().getMaxEntries();
        long ttl = jHipsterProperties.getCache().getEhcache().getTimeToLiveSeconds();
        // autoCreate() provisions the server-side entity on first connection;
        // expecting() requires it to already exist.
        ClusteringServiceConfigurationBuilder clusteringServiceConfigurationBuilder = ClusteringServiceConfigurationBuilder.cluster(clusterUri);
        ServerSideConfigurationBuilder serverSideConfigurationBuilder = (autoCreate ? clusteringServiceConfigurationBuilder.autoCreate() : clusteringServiceConfigurationBuilder.expecting())
            .defaultServerResource(offheapResourceName);
        org.ehcache.config.CacheConfiguration<Object, Object> cacheConfiguration = CacheConfigurationBuilder
            .newCacheConfigurationBuilder(Object.class, Object.class, ResourcePoolsBuilder
                .heap(heapCacheSize)
                .with(ClusteredResourcePoolBuilder.clusteredDedicated(clusteredCacheSize, MemoryUnit.MB)))
            .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(ttl)))
            .add(new ClusteredStoreConfiguration(consistency)).build();
        Map<String, org.ehcache.config.CacheConfiguration<?, ?>> caches = createCacheConfigurations(cacheConfiguration);
        EhcacheCachingProvider provider = getCachingProvider();
        DefaultConfiguration configuration = new DefaultConfiguration(caches, getClassLoader(), serverSideConfigurationBuilder.build());
        return getCacheManager(provider, configuration);
    }

    // Maps every cache name used by the application (Hibernate 2nd-level entity
    // caches, Spring caches, repository caches) to the shared template configuration.
    private Map<String, org.ehcache.config.CacheConfiguration<?, ?>> createCacheConfigurations(org.ehcache.config.CacheConfiguration<Object, Object> cacheConfiguration) {
        Map<String, org.ehcache.config.CacheConfiguration<?, ?>> caches = new HashMap<>();
        caches.put(org.ehcache.sample.repository.UserRepository.USERS_BY_LOGIN_CACHE, cacheConfiguration);
        caches.put(org.ehcache.sample.repository.UserRepository.USERS_BY_EMAIL_CACHE, cacheConfiguration);
        caches.put(org.ehcache.sample.domain.User.class.getName(), cacheConfiguration);
        caches.put(org.ehcache.sample.domain.Authority.class.getName(), cacheConfiguration);
        caches.put(org.ehcache.sample.domain.User.class.getName() + ".authorities", cacheConfiguration);
        caches.put(org.ehcache.sample.domain.Actor.class.getName(), cacheConfiguration);
        caches.put("weatherReports", cacheConfiguration);
        // jhipster-needle-ehcache-add-entry
        return caches;
    }

    // The JSR-107 provider lookup; cast is safe because Ehcache is the only
    // caching provider on the classpath.
    private EhcacheCachingProvider getCachingProvider() {
        return (EhcacheCachingProvider) Caching.getCachingProvider();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/package-info.java | fullstack/src/main/java/org/ehcache/sample/config/package-info.java | /**
* Spring Framework configuration files.
*/
package org.ehcache.sample.config;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/DateTimeFormatConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/DateTimeFormatConfiguration.java | package org.ehcache.sample.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.format.FormatterRegistry;
import org.springframework.format.datetime.standard.DateTimeFormatterRegistrar;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * Configure the converters to use the ISO format for dates by default.
 */
@Configuration
public class DateTimeFormatConfiguration implements WebMvcConfigurer {

    /**
     * Registers ISO-8601 formatters for the java.time types with Spring MVC.
     */
    @Override
    public void addFormatters(FormatterRegistry registry) {
        DateTimeFormatterRegistrar isoRegistrar = new DateTimeFormatterRegistrar();
        isoRegistrar.setUseIsoFormat(true);
        isoRegistrar.registerFormatters(registry);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/AsyncConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/AsyncConfiguration.java | package org.ehcache.sample.config;
import io.github.jhipster.async.ExceptionHandlingAsyncTaskExecutor;
import io.github.jhipster.config.JHipsterProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler;
import org.springframework.aop.interceptor.SimpleAsyncUncaughtExceptionHandler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.*;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.scheduling.annotation.SchedulingConfigurer;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
@Configuration
@EnableAsync
@EnableScheduling
public class AsyncConfiguration implements AsyncConfigurer, SchedulingConfigurer {

    private final Logger log = LoggerFactory.getLogger(AsyncConfiguration.class);

    private final JHipsterProperties jHipsterProperties;

    public AsyncConfiguration(JHipsterProperties jHipsterProperties) {
        this.jHipsterProperties = jHipsterProperties;
    }

    /**
     * Executor backing {@code @Async} methods. Pool sizes and queue capacity
     * come from the JHipster async properties; the wrapper ensures exceptions
     * thrown by async tasks are logged instead of silently lost.
     */
    @Override
    @Bean(name = "taskExecutor")
    public Executor getAsyncExecutor() {
        log.debug("Creating Async Task Executor");
        ThreadPoolTaskExecutor poolExecutor = new ThreadPoolTaskExecutor();
        poolExecutor.setCorePoolSize(jHipsterProperties.getAsync().getCorePoolSize());
        poolExecutor.setMaxPoolSize(jHipsterProperties.getAsync().getMaxPoolSize());
        poolExecutor.setQueueCapacity(jHipsterProperties.getAsync().getQueueCapacity());
        poolExecutor.setThreadNamePrefix("demo-Executor-");
        return new ExceptionHandlingAsyncTaskExecutor(poolExecutor);
    }

    /**
     * Handler that logs uncaught exceptions from {@code @Async void} methods.
     */
    @Override
    public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
        return new SimpleAsyncUncaughtExceptionHandler();
    }

    /**
     * Routes {@code @Scheduled} tasks to the dedicated scheduled-task executor.
     */
    @Override
    public void configureTasks(ScheduledTaskRegistrar taskRegistrar) {
        taskRegistrar.setScheduler(scheduledTaskExecutor());
    }

    /**
     * Scheduled-task thread pool, sized from the async core pool size.
     */
    @Bean
    public Executor scheduledTaskExecutor() {
        return Executors.newScheduledThreadPool(jHipsterProperties.getAsync().getCorePoolSize());
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/WebConfigurer.java | fullstack/src/main/java/org/ehcache/sample/config/WebConfigurer.java | package org.ehcache.sample.config;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.servlet.InstrumentedFilter;
import com.codahale.metrics.servlets.MetricsServlet;
import io.github.jhipster.config.JHipsterConstants;
import io.github.jhipster.config.JHipsterProperties;
import io.github.jhipster.config.h2.H2ConfigurationHelper;
import io.github.jhipster.web.filter.CachingHttpHeadersFilter;
import io.undertow.UndertowOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.web.embedded.undertow.UndertowServletWebServerFactory;
import org.springframework.boot.web.server.*;
import org.springframework.boot.web.servlet.ServletContextInitializer;
import org.springframework.boot.web.servlet.server.ConfigurableServletWebServerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.http.MediaType;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
import javax.servlet.*;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.*;
import static java.net.URLDecoder.decode;
/**
 * Configuration of web application with Servlet 3.0 APIs.
 * <p>
 * Registers the Dropwizard metrics filter/servlet, the production HTTP
 * caching-headers filter, the H2 console (dev only), a CORS filter, and
 * customizes the embedded server (mime types, static asset root, HTTP/2).
 */
@Configuration
public class WebConfigurer implements ServletContextInitializer, WebServerFactoryCustomizer<WebServerFactory> {

    private final Logger log = LoggerFactory.getLogger(WebConfigurer.class);

    private final Environment env;

    private final JHipsterProperties jHipsterProperties;

    // Injected via the optional setter below; may be null when metrics are not configured.
    private MetricRegistry metricRegistry;

    public WebConfigurer(Environment env, JHipsterProperties jHipsterProperties) {
        this.env = env;
        this.jHipsterProperties = jHipsterProperties;
    }

    /**
     * Registers servlet-container artifacts at startup: the metrics filter and
     * servlet always, the caching-headers filter in prod, and the H2 console in dev.
     */
    @Override
    public void onStartup(ServletContext servletContext) throws ServletException {
        if (env.getActiveProfiles().length != 0) {
            log.info("Web application configuration, using profiles: {}", (Object[]) env.getActiveProfiles());
        }
        EnumSet<DispatcherType> disps = EnumSet.of(DispatcherType.REQUEST, DispatcherType.FORWARD, DispatcherType.ASYNC);
        initMetrics(servletContext, disps);
        if (env.acceptsProfiles(JHipsterConstants.SPRING_PROFILE_PRODUCTION)) {
            initCachingHttpHeadersFilter(servletContext, disps);
        }
        if (env.acceptsProfiles(JHipsterConstants.SPRING_PROFILE_DEVELOPMENT)) {
            initH2Console(servletContext);
        }
        log.info("Web application fully configured");
    }

    /**
     * Customize the Servlet engine: Mime types, the document root, the cache.
     */
    @Override
    public void customize(WebServerFactory server) {
        setMimeMappings(server);
        // When running in an IDE or with ./mvnw spring-boot:run, set location of the static web assets.
        setLocationForStaticAssets(server);

        /*
         * Enable HTTP/2 for Undertow - https://twitter.com/ankinson/status/829256167700492288
         * HTTP/2 requires HTTPS, so HTTP requests will fallback to HTTP/1.1.
         * See the JHipsterProperties class and your application-*.yml configuration files
         * for more information.
         */
        if (jHipsterProperties.getHttp().getVersion().equals(JHipsterProperties.Http.Version.V_2_0) &&
            server instanceof UndertowServletWebServerFactory) {
            ((UndertowServletWebServerFactory) server)
                .addBuilderCustomizers(builder ->
                    builder.setServerOption(UndertowOptions.ENABLE_HTTP2, true));
        }
    }

    // Forces explicit UTF-8 charsets on html/json mime types for older clients/proxies.
    private void setMimeMappings(WebServerFactory server) {
        if (server instanceof ConfigurableServletWebServerFactory) {
            MimeMappings mappings = new MimeMappings(MimeMappings.DEFAULT);
            // IE issue, see https://github.com/jhipster/generator-jhipster/pull/711
            mappings.add("html", MediaType.TEXT_HTML_VALUE + ";charset=" + StandardCharsets.UTF_8.name().toLowerCase());
            // CloudFoundry issue, see https://github.com/cloudfoundry/gorouter/issues/64
            mappings.add("json", MediaType.TEXT_HTML_VALUE + ";charset=" + StandardCharsets.UTF_8.name().toLowerCase());
            ConfigurableServletWebServerFactory servletWebServer = (ConfigurableServletWebServerFactory) server;
            servletWebServer.setMimeMappings(mappings);
        }
    }

    // Points the document root at target/www when that directory exists
    // (i.e. when running from the source tree rather than a packaged jar).
    private void setLocationForStaticAssets(WebServerFactory server) {
        if (server instanceof ConfigurableServletWebServerFactory) {
            ConfigurableServletWebServerFactory servletWebServer = (ConfigurableServletWebServerFactory) server;
            File root;
            String prefixPath = resolvePathPrefix();
            root = new File(prefixPath + "target/www/");
            if (root.exists() && root.isDirectory()) {
                servletWebServer.setDocumentRoot(root);
            }
        }
    }

    /**
     * Resolve path prefix to static resources.
     * Returns the portion of this class's location that precedes "target/",
     * or an empty string when not running from a build directory.
     */
    private String resolvePathPrefix() {
        String fullExecutablePath;
        try {
            fullExecutablePath = decode(this.getClass().getResource("").getPath(), StandardCharsets.UTF_8.name());
        } catch (UnsupportedEncodingException e) {
            /* try without decoding if this ever happens */
            fullExecutablePath = this.getClass().getResource("").getPath();
        }
        String rootPath = Paths.get(".").toUri().normalize().getPath();
        String extractedPath = fullExecutablePath.replace(rootPath, "");
        int extractionEndIndex = extractedPath.indexOf("target/");
        if (extractionEndIndex <= 0) {
            return "";
        }
        return extractedPath.substring(0, extractionEndIndex);
    }

    /**
     * Initializes the caching HTTP Headers Filter.
     * Applies far-future cache headers to static resource URL patterns (prod only).
     */
    private void initCachingHttpHeadersFilter(ServletContext servletContext,
                                              EnumSet<DispatcherType> disps) {
        log.debug("Registering Caching HTTP Headers Filter");
        FilterRegistration.Dynamic cachingHttpHeadersFilter =
            servletContext.addFilter("cachingHttpHeadersFilter",
                new CachingHttpHeadersFilter(jHipsterProperties));

        cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/i18n/*");
        cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/content/*");
        cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/app/*");
        cachingHttpHeadersFilter.setAsyncSupported(true);
    }

    /**
     * Initializes Metrics.
     * Exposes the shared MetricRegistry to the instrumented filter (request metrics
     * on all URLs) and the metrics admin servlet under /management/metrics.
     */
    private void initMetrics(ServletContext servletContext, EnumSet<DispatcherType> disps) {
        log.debug("Initializing Metrics registries");
        servletContext.setAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE,
            metricRegistry);
        servletContext.setAttribute(MetricsServlet.METRICS_REGISTRY,
            metricRegistry);

        log.debug("Registering Metrics Filter");
        FilterRegistration.Dynamic metricsFilter = servletContext.addFilter("webappMetricsFilter",
            new InstrumentedFilter());
        metricsFilter.addMappingForUrlPatterns(disps, true, "/*");
        metricsFilter.setAsyncSupported(true);

        log.debug("Registering Metrics Servlet");
        ServletRegistration.Dynamic metricsAdminServlet =
            servletContext.addServlet("metricsServlet", new MetricsServlet());
        metricsAdminServlet.addMapping("/management/metrics/*");
        metricsAdminServlet.setAsyncSupported(true);
        metricsAdminServlet.setLoadOnStartup(2);
    }

    // CORS filter for the API, management, and Swagger endpoints; only active
    // when allowed origins are configured in JHipster properties.
    @Bean
    public CorsFilter corsFilter() {
        UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
        CorsConfiguration config = jHipsterProperties.getCors();
        if (config.getAllowedOrigins() != null && !config.getAllowedOrigins().isEmpty()) {
            log.debug("Registering CORS filter");
            source.registerCorsConfiguration("/api/**", config);
            source.registerCorsConfiguration("/management/**", config);
            source.registerCorsConfiguration("/v2/api-docs", config);
        }
        return new CorsFilter(source);
    }

    /**
     * Initializes H2 console.
     */
    private void initH2Console(ServletContext servletContext) {
        log.debug("Initialize H2 console");
        H2ConfigurationHelper.initH2Console(servletContext);
    }

    // Optional injection: absent when no MetricRegistry bean exists.
    @Autowired(required = false)
    public void setMetricRegistry(MetricRegistry metricRegistry) {
        this.metricRegistry = metricRegistry;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/JacksonConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/JacksonConfiguration.java | package org.ehcache.sample.config;
import com.fasterxml.jackson.datatype.hibernate5.Hibernate5Module;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.zalando.problem.ProblemModule;
import org.zalando.problem.violations.ConstraintViolationProblemModule;
@Configuration
public class JacksonConfiguration {

    /**
     * Support for Java date and time API.
     *
     * @return the corresponding Jackson module.
     */
    @Bean
    public JavaTimeModule javaTimeModule() {
        return new JavaTimeModule();
    }

    /**
     * Support for JDK 8 types (Optional, etc.) in Jackson.
     *
     * @return the corresponding Jackson module.
     */
    @Bean
    public Jdk8Module jdk8TimeModule() {
        return new Jdk8Module();
    }

    /**
     * Support for Hibernate types in Jackson.
     *
     * @return the corresponding Jackson module.
     */
    @Bean
    public Hibernate5Module hibernate5Module() {
        return new Hibernate5Module();
    }

    /**
     * Jackson Afterburner module to speed up serialization/deserialization.
     *
     * @return the corresponding Jackson module.
     */
    @Bean
    public AfterburnerModule afterburnerModule() {
        return new AfterburnerModule();
    }

    /**
     * Module for serialization/deserialization of RFC7807 Problem.
     *
     * @return the corresponding Jackson module.
     */
    @Bean
    ProblemModule problemModule() {
        return new ProblemModule();
    }

    /**
     * Module for serialization/deserialization of ConstraintViolationProblem.
     *
     * @return the corresponding Jackson module.
     */
    @Bean
    ConstraintViolationProblemModule constraintViolationProblemModule() {
        return new ConstraintViolationProblemModule();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/LoggingConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/LoggingConfiguration.java | package org.ehcache.sample.config;
import java.net.InetSocketAddress;
import java.util.Iterator;
import io.github.jhipster.config.JHipsterProperties;
import ch.qos.logback.classic.AsyncAppender;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.boolex.OnMarkerEvaluator;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.LoggerContextListener;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.filter.EvaluatorFilter;
import ch.qos.logback.core.spi.ContextAwareBase;
import ch.qos.logback.core.spi.FilterReply;
import net.logstash.logback.appender.LogstashTcpSocketAppender;
import net.logstash.logback.encoder.LogstashEncoder;
import net.logstash.logback.stacktrace.ShortenedThrowableConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
@Configuration
public class LoggingConfiguration {

    private static final String LOGSTASH_APPENDER_NAME = "LOGSTASH";

    private static final String ASYNC_LOGSTASH_APPENDER_NAME = "ASYNC_LOGSTASH";

    private final Logger log = LoggerFactory.getLogger(LoggingConfiguration.class);

    // Logback context behind the SLF4J factory; configured programmatically below.
    private LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();

    private final String appName;

    private final String serverPort;

    private final JHipsterProperties jHipsterProperties;

    /**
     * Applies the programmatic Logback configuration: a Logstash TCP appender
     * (when enabled in JHipster properties) and a filter that keeps "metrics"
     * marker logs out of every appender except the Logstash one.
     *
     * @param appName the Spring application name, attached to each log event
     * @param serverPort the HTTP port, attached to each log event
     * @param jHipsterProperties logging and metrics configuration
     */
    public LoggingConfiguration(@Value("${spring.application.name}") String appName, @Value("${server.port}") String serverPort,
        JHipsterProperties jHipsterProperties) {
        this.appName = appName;
        this.serverPort = serverPort;
        this.jHipsterProperties = jHipsterProperties;
        if (jHipsterProperties.getLogging().getLogstash().isEnabled()) {
            addLogstashAppender(context);
            addContextListener(context);
        }
        if (jHipsterProperties.getMetrics().getLogs().isEnabled()) {
            setMetricsMarkerLogbackFilter(context);
        }
    }

    // Registers a listener so the Logstash appender is re-added after a
    // configuration-file-triggered context reset.
    private void addContextListener(LoggerContext context) {
        LogbackLoggerContextListener loggerContextListener = new LogbackLoggerContextListener();
        loggerContextListener.setContext(context);
        context.addListener(loggerContextListener);
    }

    // Builds the Logstash TCP appender, wraps it in an async appender for
    // performance, and attaches it to the ROOT logger.
    private void addLogstashAppender(LoggerContext context) {
        log.info("Initializing Logstash logging");

        LogstashTcpSocketAppender logstashAppender = new LogstashTcpSocketAppender();
        logstashAppender.setName(LOGSTASH_APPENDER_NAME);
        logstashAppender.setContext(context);
        String customFields = "{\"app_name\":\"" + appName + "\",\"app_port\":\"" + serverPort + "\"}";

        // More documentation is available at: https://github.com/logstash/logstash-logback-encoder
        LogstashEncoder logstashEncoder = new LogstashEncoder();

        // Set the Logstash appender config from JHipster properties
        logstashAppender.addDestinations(new InetSocketAddress(jHipsterProperties.getLogging().getLogstash().getHost(), jHipsterProperties.getLogging().getLogstash().getPort()));

        // Show the root cause first in shortened stack traces.
        ShortenedThrowableConverter throwableConverter = new ShortenedThrowableConverter();
        throwableConverter.setRootCauseFirst(true);
        logstashEncoder.setThrowableConverter(throwableConverter);
        // Fix: setCustomFields was previously called twice with the same value;
        // a single call is sufficient.
        logstashEncoder.setCustomFields(customFields);

        logstashAppender.setEncoder(logstashEncoder);
        logstashAppender.start();

        // Wrap the appender in an Async appender for performance
        AsyncAppender asyncLogstashAppender = new AsyncAppender();
        asyncLogstashAppender.setContext(context);
        asyncLogstashAppender.setName(ASYNC_LOGSTASH_APPENDER_NAME);
        asyncLogstashAppender.setQueueSize(jHipsterProperties.getLogging().getLogstash().getQueueSize());
        asyncLogstashAppender.addAppender(logstashAppender);
        asyncLogstashAppender.start();

        context.getLogger("ROOT").addAppender(asyncLogstashAppender);
    }

    // Configure a log filter to remove "metrics" logs from all appenders except the "LOGSTASH" appender
    private void setMetricsMarkerLogbackFilter(LoggerContext context) {
        log.info("Filtering metrics logs from all appenders except the {} appender", LOGSTASH_APPENDER_NAME);
        // Evaluator matching events carrying the "metrics" marker.
        OnMarkerEvaluator onMarkerMetricsEvaluator = new OnMarkerEvaluator();
        onMarkerMetricsEvaluator.setContext(context);
        onMarkerMetricsEvaluator.addMarker("metrics");
        onMarkerMetricsEvaluator.start();
        EvaluatorFilter<ILoggingEvent> metricsFilter = new EvaluatorFilter<>();
        metricsFilter.setContext(context);
        metricsFilter.setEvaluator(onMarkerMetricsEvaluator);
        metricsFilter.setOnMatch(FilterReply.DENY);
        metricsFilter.start();

        // Attach the DENY filter to every appender except the async Logstash one,
        // so metrics logs only reach Logstash.
        for (ch.qos.logback.classic.Logger logger : context.getLoggerList()) {
            for (Iterator<Appender<ILoggingEvent>> it = logger.iteratorForAppenders(); it.hasNext();) {
                Appender<ILoggingEvent> appender = it.next();
                if (!appender.getName().equals(ASYNC_LOGSTASH_APPENDER_NAME)) {
                    log.debug("Filter metrics logs from the {} appender", appender.getName());
                    appender.setContext(context);
                    appender.addFilter(metricsFilter);
                    appender.start();
                }
            }
        }
    }

    /**
     * Logback configuration is achieved by configuration file and API.
     * When configuration file change is detected, the configuration is reset.
     * This listener ensures that the programmatic configuration is also re-applied after reset.
     */
    class LogbackLoggerContextListener extends ContextAwareBase implements LoggerContextListener {

        @Override
        public boolean isResetResistant() {
            return true;
        }

        @Override
        public void onStart(LoggerContext context) {
            addLogstashAppender(context);
        }

        @Override
        public void onReset(LoggerContext context) {
            addLogstashAppender(context);
        }

        @Override
        public void onStop(LoggerContext context) {
            // Nothing to do.
        }

        @Override
        public void onLevelChange(ch.qos.logback.classic.Logger logger, Level level) {
            // Nothing to do.
        }
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/MetricsConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/MetricsConfiguration.java | package org.ehcache.sample.config;
import io.github.jhipster.config.JHipsterProperties;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.JvmAttributeGaugeSet;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Slf4jReporter;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.codahale.metrics.jcache.JCacheGaugeSet;
import com.codahale.metrics.jvm.*;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
import com.zaxxer.hikari.HikariDataSource;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.dropwizard.DropwizardExports;
import io.prometheus.client.exporter.MetricsServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.web.servlet.ServletContextInitializer;
import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.*;
import javax.annotation.PostConstruct;
import javax.servlet.ServletContext;
import java.lang.management.ManagementFactory;
import java.util.concurrent.TimeUnit;
@Configuration
@EnableMetrics(proxyTargetClass = true)
public class MetricsConfiguration extends MetricsConfigurerAdapter implements ServletContextInitializer {

    // Dropwizard registry names under which the gauge sets below are registered in init().
    private static final String PROP_METRIC_REG_JVM_MEMORY = "jvm.memory";
    private static final String PROP_METRIC_REG_JVM_GARBAGE = "jvm.garbage";
    private static final String PROP_METRIC_REG_JVM_THREADS = "jvm.threads";
    private static final String PROP_METRIC_REG_JVM_FILES = "jvm.files";
    private static final String PROP_METRIC_REG_JVM_BUFFERS = "jvm.buffers";
    private static final String PROP_METRIC_REG_JVM_ATTRIBUTE_SET = "jvm.attributes";
    private static final String PROP_METRIC_REG_JCACHE_STATISTICS = "jcache.statistics";

    private final Logger log = LoggerFactory.getLogger(MetricsConfiguration.class);

    // Single shared registries, exposed as beans via the getters below.
    private MetricRegistry metricRegistry = new MetricRegistry();
    private HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();

    private final JHipsterProperties jHipsterProperties;

    // Optional: only set when a HikariDataSource bean exists (see setter below).
    private HikariDataSource hikariDataSource;

    // The cacheManager is injected here to force its initialization, so the JCacheGaugeSet
    // will be correctly created below.
    public MetricsConfiguration(JHipsterProperties jHipsterProperties, CacheManager cacheManager) {
        this.jHipsterProperties = jHipsterProperties;
    }

    // required = false: the datasource is absent in some configurations; metrics hookup is skipped then.
    @Autowired(required = false)
    public void setHikariDataSource(HikariDataSource hikariDataSource) {
        this.hikariDataSource = hikariDataSource;
    }

    @Override
    @Bean
    public MetricRegistry getMetricRegistry() {
        return metricRegistry;
    }

    @Override
    @Bean
    public HealthCheckRegistry getHealthCheckRegistry() {
        return healthCheckRegistry;
    }

    /**
     * Registers JVM and JCache gauge sets, wires Hikari pool metrics when a datasource
     * is present, and starts JMX / SLF4J reporters according to the JHipster properties.
     */
    @PostConstruct
    public void init() {
        log.debug("Registering JVM gauges");
        metricRegistry.register(PROP_METRIC_REG_JVM_MEMORY, new MemoryUsageGaugeSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_GARBAGE, new GarbageCollectorMetricSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_THREADS, new ThreadStatesGaugeSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_FILES, new FileDescriptorRatioGauge());
        metricRegistry.register(PROP_METRIC_REG_JVM_BUFFERS, new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer()));
        metricRegistry.register(PROP_METRIC_REG_JVM_ATTRIBUTE_SET, new JvmAttributeGaugeSet());
        metricRegistry.register(PROP_METRIC_REG_JCACHE_STATISTICS, new JCacheGaugeSet());
        if (hikariDataSource != null) {
            log.debug("Monitoring the datasource");
            // remove the factory created by HikariDataSourceMetricsPostProcessor until JHipster migrate to Micrometer
            hikariDataSource.setMetricsTrackerFactory(null);
            hikariDataSource.setMetricRegistry(metricRegistry);
        }
        if (jHipsterProperties.getMetrics().getJmx().isEnabled()) {
            log.debug("Initializing Metrics JMX reporting");
            // Reporter lives for the whole application lifetime; never stopped explicitly.
            JmxReporter jmxReporter = JmxReporter.forRegistry(metricRegistry).build();
            jmxReporter.start();
        }
        if (jHipsterProperties.getMetrics().getLogs().isEnabled()) {
            log.info("Initializing Metrics Log reporting");
            Marker metricsMarker = MarkerFactory.getMarker("metrics");
            final Slf4jReporter reporter = Slf4jReporter.forRegistry(metricRegistry)
                .outputTo(LoggerFactory.getLogger("metrics"))
                .markWith(metricsMarker)
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .build();
            reporter.start(jHipsterProperties.getMetrics().getLogs().getReportFrequency(), TimeUnit.SECONDS);
        }
    }

    /**
     * When Prometheus export is enabled, bridges the Dropwizard registry into the
     * Prometheus default registry and maps a metrics servlet at the configured endpoint.
     */
    @Override
    public void onStartup(ServletContext servletContext) {
        if (jHipsterProperties.getMetrics().getPrometheus().isEnabled()) {
            String endpoint = jHipsterProperties.getMetrics().getPrometheus().getEndpoint();
            log.debug("Initializing prometheus metrics exporting via {}", endpoint);
            CollectorRegistry.defaultRegistry.register(new DropwizardExports(metricRegistry));
            servletContext
                .addServlet("prometheusMetrics", new MetricsServlet(CollectorRegistry.defaultRegistry))
                .addMapping(endpoint);
        }
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/LiquibaseConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/LiquibaseConfiguration.java | package org.ehcache.sample.config;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.liquibase.LiquibaseProperties;
import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.core.task.TaskExecutor;
import io.github.jhipster.config.JHipsterConstants;
import io.github.jhipster.config.liquibase.AsyncSpringLiquibase;
import liquibase.integration.spring.SpringLiquibase;
@Configuration
public class LiquibaseConfiguration {

    private final Logger log = LoggerFactory.getLogger(LiquibaseConfiguration.class);

    private final Environment env;

    // Injected to force cache initialization ordering relative to Liquibase; not used directly.
    // NOTE(review): presumed intent based on the injection-without-use pattern — confirm.
    private final CacheManager cacheManager;

    public LiquibaseConfiguration(Environment env, CacheManager cacheManager) {
        this.env = env;
        this.cacheManager = cacheManager;
    }

    /**
     * Builds the Liquibase runner, asynchronously via {@link AsyncSpringLiquibase}.
     * Migrations are disabled entirely when the "no-liquibase" profile is active;
     * otherwise the enabled flag from {@code LiquibaseProperties} is honored.
     *
     * @param taskExecutor executor used to run migrations off the startup thread
     * @param dataSource the datasource migrations run against
     * @param liquibaseProperties Spring Boot Liquibase settings (contexts, schema, etc.)
     * @return the configured {@link SpringLiquibase} bean
     */
    @Bean
    public SpringLiquibase liquibase(@Qualifier("taskExecutor") TaskExecutor taskExecutor,
            DataSource dataSource, LiquibaseProperties liquibaseProperties) {
        // Use liquibase.integration.spring.SpringLiquibase if you don't want Liquibase to start asynchronously
        SpringLiquibase liquibase = new AsyncSpringLiquibase(taskExecutor, env);
        liquibase.setDataSource(dataSource);
        liquibase.setChangeLog("classpath:config/liquibase/master.xml");
        liquibase.setContexts(liquibaseProperties.getContexts());
        liquibase.setDefaultSchema(liquibaseProperties.getDefaultSchema());
        liquibase.setDropFirst(liquibaseProperties.isDropFirst());
        liquibase.setChangeLogParameters(liquibaseProperties.getParameters());
        if (env.acceptsProfiles(JHipsterConstants.SPRING_PROFILE_NO_LIQUIBASE)) {
            liquibase.setShouldRun(false);
        } else {
            liquibase.setShouldRun(liquibaseProperties.isEnabled());
            log.debug("Configuring Liquibase");
        }
        return liquibase;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/CustomInfoContributor.java | fullstack/src/main/java/org/ehcache/sample/config/CustomInfoContributor.java | package org.ehcache.sample.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.stereotype.Component;
import java.net.InetAddress;
import java.net.UnknownHostException;
/**
* @author Henri Tremblay
*/
@Component
public class CustomInfoContributor implements InfoContributor {

    private final Logger log = LoggerFactory.getLogger(CustomInfoContributor.class);

    /**
     * Adds the local machine's host name to the actuator {@code /info} payload.
     */
    @Override
    public void contribute(Info.Builder builder) {
        builder.withDetail("hostname", resolveHostname());
    }

    /** Resolves the local host name, falling back to a placeholder when resolution fails. */
    private String resolveHostname() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            log.warn("Problem trying to resolve the hostname", e);
            return "UNKNOWN";
        }
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/Constants.java | fullstack/src/main/java/org/ehcache/sample/config/Constants.java | package org.ehcache.sample.config;
/**
* Application constants.
*/
public final class Constants {

    /** Regex that acceptable user logins must match (letters, digits, {@code _ . @ -}). */
    public static final String LOGIN_REGEX = "^[_.@A-Za-z0-9-]*$";
    /** Login reserved for internal, system-initiated operations. */
    public static final String SYSTEM_ACCOUNT = "system";
    /** Login attributed to unauthenticated users. */
    public static final String ANONYMOUS_USER = "anonymoususer";
    /** Language key used when the user has not selected one. */
    public static final String DEFAULT_LANGUAGE = "en";

    private Constants() {
        // Utility class: guard against instantiation (including via reflection).
        throw new AssertionError("Constants is a non-instantiable utility class");
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/LoggingAspectConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/LoggingAspectConfiguration.java | package org.ehcache.sample.config;
import org.ehcache.sample.aop.logging.LoggingAspect;
import io.github.jhipster.config.JHipsterConstants;
import org.springframework.context.annotation.*;
import org.springframework.core.env.Environment;
@Configuration
@EnableAspectJAutoProxy
public class LoggingAspectConfiguration {

    /**
     * Registers the method-logging aspect, but only under the development profile,
     * so production calls are not proxied for logging.
     *
     * @param env the Spring environment handed to the aspect
     * @return the {@link LoggingAspect} bean
     */
    @Bean
    @Profile(JHipsterConstants.SPRING_PROFILE_DEVELOPMENT)
    public LoggingAspect loggingAspect(Environment env) {
        return new LoggingAspect(env);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/LocaleConfiguration.java | fullstack/src/main/java/org/ehcache/sample/config/LocaleConfiguration.java | package org.ehcache.sample.config;
import io.github.jhipster.config.locale.AngularCookieLocaleResolver;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.LocaleResolver;
import org.springframework.web.servlet.config.annotation.*;
import org.springframework.web.servlet.i18n.LocaleChangeInterceptor;
@Configuration
public class LocaleConfiguration implements WebMvcConfigurer {

    /**
     * Cookie-based locale resolver compatible with the angular-translate cookie format.
     */
    @Bean(name = "localeResolver")
    public LocaleResolver localeResolver() {
        AngularCookieLocaleResolver resolver = new AngularCookieLocaleResolver();
        resolver.setCookieName("NG_TRANSLATE_LANG_KEY");
        return resolver;
    }

    /**
     * Lets clients switch locale via the {@code language} request parameter.
     */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        LocaleChangeInterceptor interceptor = new LocaleChangeInterceptor();
        interceptor.setParamName("language");
        registry.addInterceptor(interceptor);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/audit/AuditEventConverter.java | fullstack/src/main/java/org/ehcache/sample/config/audit/AuditEventConverter.java | package org.ehcache.sample.config.audit;
import org.ehcache.sample.domain.PersistentAuditEvent;
import org.springframework.boot.actuate.audit.AuditEvent;
import org.springframework.security.web.authentication.WebAuthenticationDetails;
import org.springframework.stereotype.Component;
import java.util.*;
@Component
public class AuditEventConverter {

    /**
     * Convert a list of PersistentAuditEvent to a list of AuditEvent.
     *
     * @param persistentAuditEvents the list to convert; may be {@code null}
     * @return the converted list, empty when the input is {@code null}
     */
    public List<AuditEvent> convertToAuditEvent(Iterable<PersistentAuditEvent> persistentAuditEvents) {
        if (persistentAuditEvents == null) {
            return Collections.emptyList();
        }
        List<AuditEvent> auditEvents = new ArrayList<>();
        for (PersistentAuditEvent persistentAuditEvent : persistentAuditEvents) {
            auditEvents.add(convertToAuditEvent(persistentAuditEvent));
        }
        return auditEvents;
    }

    /**
     * Convert a PersistentAuditEvent to an AuditEvent.
     *
     * @param persistentAuditEvent the event to convert; may be {@code null}
     * @return the converted event, or {@code null} when the input is {@code null}
     */
    public AuditEvent convertToAuditEvent(PersistentAuditEvent persistentAuditEvent) {
        if (persistentAuditEvent == null) {
            return null;
        }
        return new AuditEvent(persistentAuditEvent.getAuditEventDate(), persistentAuditEvent.getPrincipal(),
            persistentAuditEvent.getAuditEventType(), convertDataToObjects(persistentAuditEvent.getData()));
    }

    /**
     * Internal conversion. This is needed to support the current SpringBoot actuator
     * AuditEventRepository interface.
     *
     * @param data the data to convert; may be {@code null}
     * @return a map of String, Object (never {@code null})
     */
    public Map<String, Object> convertDataToObjects(Map<String, String> data) {
        Map<String, Object> results = new HashMap<>();
        if (data != null) {
            // Bulk copy instead of an element-by-element loop; widening String -> Object is implicit.
            results.putAll(data);
        }
        return results;
    }

    /**
     * Internal conversion. This method will allow to save additional data.
     * By default, it will save the object as string.
     *
     * @param data the data to convert; may be {@code null}
     * @return a map of String, String (never {@code null})
     */
    public Map<String, String> convertDataToStrings(Map<String, Object> data) {
        Map<String, String> results = new HashMap<>();
        if (data != null) {
            for (Map.Entry<String, Object> entry : data.entrySet()) {
                // Authentication details are flattened into dedicated keys; everything
                // else is stored via its string form (null-safe through Objects.toString).
                if (entry.getValue() instanceof WebAuthenticationDetails) {
                    WebAuthenticationDetails authenticationDetails = (WebAuthenticationDetails) entry.getValue();
                    results.put("remoteAddress", authenticationDetails.getRemoteAddress());
                    results.put("sessionId", authenticationDetails.getSessionId());
                } else {
                    results.put(entry.getKey(), Objects.toString(entry.getValue()));
                }
            }
        }
        return results;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/config/audit/package-info.java | fullstack/src/main/java/org/ehcache/sample/config/audit/package-info.java | /**
* Audit specific code.
*/
package org.ehcache.sample.config.audit;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/AccountResource.java | fullstack/src/main/java/org/ehcache/sample/web/rest/AccountResource.java | package org.ehcache.sample.web.rest;
import com.codahale.metrics.annotation.Timed;
import org.ehcache.sample.domain.User;
import org.ehcache.sample.repository.UserRepository;
import org.ehcache.sample.security.SecurityUtils;
import org.ehcache.sample.service.MailService;
import org.ehcache.sample.service.UserService;
import org.ehcache.sample.service.dto.PasswordChangeDTO;
import org.ehcache.sample.service.dto.UserDTO;
import org.ehcache.sample.web.rest.errors.*;
import org.ehcache.sample.web.rest.vm.KeyAndPasswordVM;
import org.ehcache.sample.web.rest.vm.ManagedUserVM;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.util.*;
/**
* REST controller for managing the current user's account.
*/
@RestController
@RequestMapping("/api")
public class AccountResource {

    private final Logger log = LoggerFactory.getLogger(AccountResource.class);

    private final UserRepository userRepository;

    private final UserService userService;

    private final MailService mailService;

    public AccountResource(UserRepository userRepository, UserService userService, MailService mailService) {
        this.userRepository = userRepository;
        this.userService = userService;
        this.mailService = mailService;
    }

    /**
     * POST /register : register the user.
     *
     * @param managedUserVM the managed user View Model
     * @throws InvalidPasswordException 400 (Bad Request) if the password is incorrect
     * @throws EmailAlreadyUsedException 400 (Bad Request) if the email is already used
     * @throws LoginAlreadyUsedException 400 (Bad Request) if the login is already used
     */
    @PostMapping("/register")
    @Timed
    @ResponseStatus(HttpStatus.CREATED)
    public void registerAccount(@Valid @RequestBody ManagedUserVM managedUserVM) {
        // Password length is validated here rather than via bean validation so the
        // same check can be reused by the change/reset flows below.
        if (!checkPasswordLength(managedUserVM.getPassword())) {
            throw new InvalidPasswordException();
        }
        User user = userService.registerUser(managedUserVM, managedUserVM.getPassword());
        mailService.sendActivationEmail(user);
    }

    /**
     * GET /activate : activate the registered user.
     *
     * @param key the activation key
     * @throws InternalServerErrorException 500 (Internal Server Error) if no user matches the key
     */
    @GetMapping("/activate")
    @Timed
    public void activateAccount(@RequestParam(value = "key") String key) {
        Optional<User> user = userService.activateRegistration(key);
        if (!user.isPresent()) {
            throw new InternalServerErrorException("No user was found for this activation key");
        }
    }

    /**
     * GET /authenticate : check if the user is authenticated, and return its login.
     *
     * @param request the HTTP request
     * @return the login if the user is authenticated, otherwise {@code null}
     */
    @GetMapping("/authenticate")
    @Timed
    public String isAuthenticated(HttpServletRequest request) {
        log.debug("REST request to check if the current user is authenticated");
        return request.getRemoteUser();
    }

    /**
     * GET /account : get the current user.
     *
     * @return the current user as a DTO
     * @throws InternalServerErrorException 500 (Internal Server Error) if the user couldn't be returned
     */
    @GetMapping("/account")
    @Timed
    public UserDTO getAccount() {
        return userService.getUserWithAuthorities()
            .map(UserDTO::new)
            .orElseThrow(() -> new InternalServerErrorException("User could not be found"));
    }

    /**
     * POST /account : update the current user information.
     *
     * @param userDTO the current user information
     * @throws EmailAlreadyUsedException 400 (Bad Request) if the email is already used by another account
     * @throws InternalServerErrorException 500 (Internal Server Error) if the user login wasn't found
     */
    @PostMapping("/account")
    @Timed
    public void saveAccount(@Valid @RequestBody UserDTO userDTO) {
        final String userLogin = SecurityUtils.getCurrentUserLogin().orElseThrow(() -> new InternalServerErrorException("Current user login not found"));
        // Reject the email only when it belongs to a DIFFERENT account than the caller's.
        Optional<User> existingUser = userRepository.findOneByEmailIgnoreCase(userDTO.getEmail());
        if (existingUser.isPresent() && (!existingUser.get().getLogin().equalsIgnoreCase(userLogin))) {
            throw new EmailAlreadyUsedException();
        }
        Optional<User> user = userRepository.findOneByLogin(userLogin);
        if (!user.isPresent()) {
            throw new InternalServerErrorException("User could not be found");
        }
        // Only profile fields are updatable here; login and authorities are intentionally not passed.
        userService.updateUser(userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail(),
            userDTO.getLangKey(), userDTO.getImageUrl());
    }

    /**
     * POST /account/change-password : changes the current user's password.
     *
     * @param passwordChangeDto current and new password
     * @throws InvalidPasswordException 400 (Bad Request) if the new password is incorrect
     */
    @PostMapping(path = "/account/change-password")
    @Timed
    public void changePassword(@RequestBody PasswordChangeDTO passwordChangeDto) {
        if (!checkPasswordLength(passwordChangeDto.getNewPassword())) {
            throw new InvalidPasswordException();
        }
        userService.changePassword(passwordChangeDto.getCurrentPassword(), passwordChangeDto.getNewPassword());
    }

    /**
     * POST /account/reset-password/init : Send an email to reset the password of the user.
     *
     * @param mail the mail of the user
     * @throws EmailNotFoundException 400 (Bad Request) if the email address is not registered
     */
    @PostMapping(path = "/account/reset-password/init")
    @Timed
    public void requestPasswordReset(@RequestBody String mail) {
        mailService.sendPasswordResetMail(
            userService.requestPasswordReset(mail)
                .orElseThrow(EmailNotFoundException::new)
        );
    }

    /**
     * POST /account/reset-password/finish : Finish to reset the password of the user.
     *
     * @param keyAndPassword the generated key and the new password
     * @throws InvalidPasswordException 400 (Bad Request) if the password is incorrect
     * @throws InternalServerErrorException 500 (Internal Server Error) if no user matches the reset key
     */
    @PostMapping(path = "/account/reset-password/finish")
    @Timed
    public void finishPasswordReset(@RequestBody KeyAndPasswordVM keyAndPassword) {
        if (!checkPasswordLength(keyAndPassword.getNewPassword())) {
            throw new InvalidPasswordException();
        }
        Optional<User> user =
            userService.completePasswordReset(keyAndPassword.getNewPassword(), keyAndPassword.getKey());
        if (!user.isPresent()) {
            throw new InternalServerErrorException("No user was found for this reset key");
        }
    }

    // A password is acceptable when it is non-empty and within the VM-defined length bounds.
    private static boolean checkPasswordLength(String password) {
        return !StringUtils.isEmpty(password) &&
            password.length() >= ManagedUserVM.PASSWORD_MIN_LENGTH &&
            password.length() <= ManagedUserVM.PASSWORD_MAX_LENGTH;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/UserJWTController.java | fullstack/src/main/java/org/ehcache/sample/web/rest/UserJWTController.java | package org.ehcache.sample.web.rest;
import org.ehcache.sample.security.jwt.JWTFilter;
import org.ehcache.sample.security.jwt.TokenProvider;
import org.ehcache.sample.web.rest.vm.LoginVM;
import com.codahale.metrics.annotation.Timed;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
/**
* Controller to authenticate users.
*/
@RestController
@RequestMapping("/api")
public class UserJWTController {

    private final TokenProvider tokenProvider;

    private final AuthenticationManager authenticationManager;

    public UserJWTController(TokenProvider tokenProvider, AuthenticationManager authenticationManager) {
        this.tokenProvider = tokenProvider;
        this.authenticationManager = authenticationManager;
    }

    /**
     * POST /authenticate : authenticates the given credentials and issues a JWT.
     *
     * @param loginVM username, password and optional remember-me flag
     * @return 200 (OK) with the token in both the body and the Authorization header
     */
    @PostMapping("/authenticate")
    @Timed
    public ResponseEntity<JWTToken> authorize(@Valid @RequestBody LoginVM loginVM) {
        UsernamePasswordAuthenticationToken authenticationToken =
            new UsernamePasswordAuthenticationToken(loginVM.getUsername(), loginVM.getPassword());
        Authentication authentication = this.authenticationManager.authenticate(authenticationToken);
        SecurityContextHolder.getContext().setAuthentication(authentication);
        // Null-safe unwrap of the boxed flag: null and FALSE both mean "don't remember".
        boolean rememberMe = Boolean.TRUE.equals(loginVM.isRememberMe());
        String jwt = tokenProvider.createToken(authentication, rememberMe);
        HttpHeaders httpHeaders = new HttpHeaders();
        httpHeaders.add(JWTFilter.AUTHORIZATION_HEADER, "Bearer " + jwt);
        return new ResponseEntity<>(new JWTToken(jwt), httpHeaders, HttpStatus.OK);
    }

    /**
     * Object to return as body in JWT Authentication.
     */
    static class JWTToken {

        private String idToken;

        JWTToken(String idToken) {
            this.idToken = idToken;
        }

        @JsonProperty("id_token")
        String getIdToken() {
            return idToken;
        }

        void setIdToken(String idToken) {
            this.idToken = idToken;
        }
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/package-info.java | fullstack/src/main/java/org/ehcache/sample/web/rest/package-info.java | /**
* Spring MVC REST controllers.
*/
package org.ehcache.sample.web.rest;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/StarResource.java | fullstack/src/main/java/org/ehcache/sample/web/rest/StarResource.java | package org.ehcache.sample.web.rest;
import org.ehcache.sample.domain.Actor;
import org.ehcache.sample.repository.ActorRepository;
import org.ehcache.sample.service.ActorService;
import org.ehcache.sample.service.ResourceCallService;
import org.ehcache.sample.service.WeatherService;
import org.ehcache.sample.service.dto.ResourceType;
import org.ehcache.sample.service.dto.StarDTO;
import org.ehcache.sample.service.dto.WeatherReport;
import org.ehcache.sample.web.rest.util.PaginationUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.codahale.metrics.annotation.Timed;
import com.google.common.base.Stopwatch;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import static java.net.InetAddress.*;
@RestController
@RequestMapping("/api")
public class StarResource {

    // Reference locations for which a weather report is always fetched alongside
    // the star's birth location (see starDetails).
    private static final String MONTREAL = "Montréal, Québec, Canada";
    private static final String PARIS = "Paris, Ile de France, France";
    private static final String MOSCOW = "Moscow, Russia";
    private static final String TOKYO = "Tokyo, Japan";

    private final Logger log = LoggerFactory.getLogger(StarResource.class);

    private final ActorService actorService;
    private final WeatherService weatherService;
    private final ResourceCallService resourceCallService;

    // Resolved once at startup and reported back in each StarDTO.
    private String hostname;

    public StarResource(ActorService actorService, WeatherService weatherService, ResourceCallService resourceCallService) {
        this.actorService = actorService;
        this.weatherService = weatherService;
        this.resourceCallService = resourceCallService;
    }

    // Note: an UnknownHostException here propagates out of @PostConstruct and
    // aborts bean initialization.
    @PostConstruct
    public void init() throws UnknownHostException {
        hostname = getLocalHost().getHostName();
    }

    /**
     * GET /stars : get all the stars.
     *
     * @param pageable the pagination information
     * @return the ResponseEntity with status 200 (OK) and the list of stars in body
     */
    @GetMapping("/stars")
    @Timed
    public ResponseEntity<List<Actor>> getAllStars(Pageable pageable) {
        log.debug("REST request to get a page of Stars");
        Page<Actor> page = actorService.findAll(pageable);
        HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/api/stars");
        return new ResponseEntity<>(page.getContent(), headers, HttpStatus.OK);
    }

    /**
     * GET /stars/{id} : get one star with weather reports for its birth location
     * and the four reference cities, plus resource-call timing information.
     *
     * @param id the actor id
     * @return 200 (OK) with a {@link StarDTO}, or 404 (Not Found) if the actor does not exist
     */
    @RequestMapping(value = "/stars/{id}",
        method = RequestMethod.GET,
        produces = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity<StarDTO> starDetails(@PathVariable("id") long id) {
        // The stopwatch covers only the actor lookup; its elapsed time is recorded
        // as a DATABASE resource call before the weather lookups start.
        Stopwatch stopwatch = Stopwatch.createStarted();
        return actorService.findOne(id)
            .map(foundActor -> {
                resourceCallService.addCall("ActorRepository.findOne", ResourceType.DATABASE, Long.toString(id), stopwatch.elapsed(TimeUnit.MILLISECONDS));
                List<WeatherReport> weatherReports = new ArrayList<>(5);
                // An actor without a birth location still yields 5 reports: an empty one plus the 4 reference cities.
                weatherReports.add(foundActor.getBirthLocation() == null ? new WeatherReport() : weatherService.retrieveWeatherReport(foundActor.getBirthLocation(), foundActor.getBirthDate()));
                weatherReports.add(weatherService.retrieveWeatherReport(MONTREAL, foundActor.getBirthDate()));
                weatherReports.add(weatherService.retrieveWeatherReport(PARIS, foundActor.getBirthDate()));
                weatherReports.add(weatherService.retrieveWeatherReport(MOSCOW, foundActor.getBirthDate()));
                weatherReports.add(weatherService.retrieveWeatherReport(TOKYO, foundActor.getBirthDate()));
                long sum = resourceCallService.currentElapsed();
                StarDTO actorAndWeatherAndCallReports =
                    new StarDTO(foundActor, weatherReports, resourceCallService.getReports(), sum, hostname);
                return new ResponseEntity<>(actorAndWeatherAndCallReports, HttpStatus.OK);
            })
            .orElse(new ResponseEntity<>(HttpStatus.NOT_FOUND));
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/LogsResource.java | fullstack/src/main/java/org/ehcache/sample/web/rest/LogsResource.java | package org.ehcache.sample.web.rest;
import org.ehcache.sample.web.rest.vm.LoggerVM;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import com.codahale.metrics.annotation.Timed;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.stream.Collectors;
/**
* Controller for view and managing Log Level at runtime.
*/
@RestController
@RequestMapping("/management")
public class LogsResource {

    /**
     * GET /logs : list every configured Logback logger with its effective level.
     */
    @GetMapping("/logs")
    @Timed
    public List<LoggerVM> getList() {
        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
        return loggerContext.getLoggerList()
            .stream()
            .map(logger -> new LoggerVM(logger))
            .collect(Collectors.toList());
    }

    /**
     * PUT /logs : set the level of the named logger at runtime.
     */
    @PutMapping("/logs")
    @ResponseStatus(HttpStatus.NO_CONTENT)
    @Timed
    public void changeLevel(@RequestBody LoggerVM jsonLogger) {
        Level newLevel = Level.valueOf(jsonLogger.getLevel());
        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
        loggerContext.getLogger(jsonLogger.getName()).setLevel(newLevel);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/UserResource.java | fullstack/src/main/java/org/ehcache/sample/web/rest/UserResource.java | package org.ehcache.sample.web.rest;
import org.ehcache.sample.config.Constants;
import org.ehcache.sample.domain.User;
import org.ehcache.sample.repository.UserRepository;
import org.ehcache.sample.security.AuthoritiesConstants;
import org.ehcache.sample.service.MailService;
import org.ehcache.sample.service.UserService;
import org.ehcache.sample.service.dto.UserDTO;
import org.ehcache.sample.web.rest.errors.BadRequestAlertException;
import org.ehcache.sample.web.rest.errors.EmailAlreadyUsedException;
import org.ehcache.sample.web.rest.errors.LoginAlreadyUsedException;
import org.ehcache.sample.web.rest.util.HeaderUtil;
import org.ehcache.sample.web.rest.util.PaginationUtil;
import com.codahale.metrics.annotation.Timed;
import io.github.jhipster.web.util.ResponseUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.*;
/**
 * REST controller for managing users.
 * <p>
 * This class accesses the User entity, and needs to fetch its collection of authorities.
 * <p>
 * For a normal use-case an eager User/Authority relationship with no View Model or DTO would be
 * simpler and faster (single outer join). A View Model and a DTO are used here for 3 reasons:
 * <ul>
 * <li>The association between the user and the authorities stays lazy, so other entities that
 * reference users do not drag in the authorities for nothing (performance of client code is
 * the #1 goal).</li>
 * <li>The resulting n+1 queries are served from the second-level cache after the first HTTP
 * call, which in practice beats repeating the outer join on every request.</li>
 * <li>As this manages users, for security reasons a DTO layer is preferable.</li>
 * </ul>
 * <p>
 * Another option would be to have a specific JPA entity graph to handle this case.
 */
@RestController
@RequestMapping("/api")
public class UserResource {

    private final Logger log = LoggerFactory.getLogger(UserResource.class);

    private final UserService userService;

    private final UserRepository userRepository;

    private final MailService mailService;

    public UserResource(UserService userService, UserRepository userRepository, MailService mailService) {
        this.userService = userService;
        this.userRepository = userRepository;
        this.mailService = mailService;
    }

    /**
     * POST  /users : Creates a new user.
     * <p>
     * Creates a new user if the login and email are not already used, and sends a
     * mail with an activation link. The user needs to be activated on creation.
     *
     * @param userDTO the user to create
     * @return the ResponseEntity with status 201 (Created) and with body the new user
     * @throws URISyntaxException if the Location URI syntax is incorrect
     * @throws BadRequestAlertException 400 (Bad Request) if the login or email is already in use
     */
    @PostMapping("/users")
    @Timed
    @PreAuthorize("hasRole(\"" + AuthoritiesConstants.ADMIN + "\")")
    public ResponseEntity<User> createUser(@Valid @RequestBody UserDTO userDTO) throws URISyntaxException {
        log.debug("REST request to save User : {}", userDTO);

        if (userDTO.getId() != null) {
            throw new BadRequestAlertException("A new user cannot already have an ID", "userManagement", "idexists");
        }
        // Logins are stored in lower case, so normalise before the uniqueness check.
        if (userRepository.findOneByLogin(userDTO.getLogin().toLowerCase()).isPresent()) {
            throw new LoginAlreadyUsedException();
        }
        if (userRepository.findOneByEmailIgnoreCase(userDTO.getEmail()).isPresent()) {
            throw new EmailAlreadyUsedException();
        }

        User createdUser = userService.createUser(userDTO);
        mailService.sendCreationEmail(createdUser);
        return ResponseEntity.created(new URI("/api/users/" + createdUser.getLogin()))
            .headers(HeaderUtil.createAlert("A user is created with identifier " + createdUser.getLogin(), createdUser.getLogin()))
            .body(createdUser);
    }

    /**
     * PUT  /users : Updates an existing User.
     *
     * @param userDTO the user to update
     * @return the ResponseEntity with status 200 (OK) and with body the updated user
     * @throws EmailAlreadyUsedException 400 (Bad Request) if the email is already in use
     * @throws LoginAlreadyUsedException 400 (Bad Request) if the login is already in use
     */
    @PutMapping("/users")
    @Timed
    @PreAuthorize("hasRole(\"" + AuthoritiesConstants.ADMIN + "\")")
    public ResponseEntity<UserDTO> updateUser(@Valid @RequestBody UserDTO userDTO) {
        log.debug("REST request to update User : {}", userDTO);

        // Reject when another user (different id) already owns the email or login.
        Optional<User> userWithSameEmail = userRepository.findOneByEmailIgnoreCase(userDTO.getEmail());
        if (userWithSameEmail.isPresent() && (!userWithSameEmail.get().getId().equals(userDTO.getId()))) {
            throw new EmailAlreadyUsedException();
        }
        Optional<User> userWithSameLogin = userRepository.findOneByLogin(userDTO.getLogin().toLowerCase());
        if (userWithSameLogin.isPresent() && (!userWithSameLogin.get().getId().equals(userDTO.getId()))) {
            throw new LoginAlreadyUsedException();
        }

        Optional<UserDTO> updatedUser = userService.updateUser(userDTO);
        return ResponseUtil.wrapOrNotFound(updatedUser,
            HeaderUtil.createAlert("A user is updated with identifier " + userDTO.getLogin(), userDTO.getLogin()));
    }

    /**
     * GET  /users : get all users.
     *
     * @param pageable the pagination information
     * @return the ResponseEntity with status 200 (OK) and with body all users
     */
    @GetMapping("/users")
    @Timed
    public ResponseEntity<List<UserDTO>> getAllUsers(Pageable pageable) {
        final Page<UserDTO> userPage = userService.getAllManagedUsers(pageable);
        HttpHeaders paginationHeaders = PaginationUtil.generatePaginationHttpHeaders(userPage, "/api/users");
        return new ResponseEntity<>(userPage.getContent(), paginationHeaders, HttpStatus.OK);
    }

    /**
     * @return a string list of all of the roles
     */
    @GetMapping("/users/authorities")
    @Timed
    @PreAuthorize("hasRole(\"" + AuthoritiesConstants.ADMIN + "\")")
    public List<String> getAuthorities() {
        return userService.getAuthorities();
    }

    /**
     * GET  /users/:login : get the "login" user.
     *
     * @param login the login of the user to find
     * @return the ResponseEntity with status 200 (OK) and with body the "login" user, or with status 404 (Not Found)
     */
    @GetMapping("/users/{login:" + Constants.LOGIN_REGEX + "}")
    @Timed
    public ResponseEntity<UserDTO> getUser(@PathVariable String login) {
        log.debug("REST request to get User : {}", login);
        Optional<UserDTO> user = userService.getUserWithAuthoritiesByLogin(login).map(UserDTO::new);
        return ResponseUtil.wrapOrNotFound(user);
    }

    /**
     * DELETE /users/:login : delete the "login" User.
     *
     * @param login the login of the user to delete
     * @return the ResponseEntity with status 200 (OK)
     */
    @DeleteMapping("/users/{login:" + Constants.LOGIN_REGEX + "}")
    @Timed
    @PreAuthorize("hasRole(\"" + AuthoritiesConstants.ADMIN + "\")")
    public ResponseEntity<Void> deleteUser(@PathVariable String login) {
        log.debug("REST request to delete User: {}", login);
        userService.deleteUser(login);
        return ResponseEntity.ok()
            .headers(HeaderUtil.createAlert("A user is deleted with identifier " + login, login))
            .build();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/ActorResource.java | fullstack/src/main/java/org/ehcache/sample/web/rest/ActorResource.java | package org.ehcache.sample.web.rest;
import com.codahale.metrics.annotation.Timed;
import org.ehcache.sample.domain.Actor;
import org.ehcache.sample.service.ActorService;
import org.ehcache.sample.web.rest.errors.BadRequestAlertException;
import org.ehcache.sample.web.rest.util.HeaderUtil;
import org.ehcache.sample.web.rest.util.PaginationUtil;
import io.github.jhipster.web.util.ResponseUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Optional;
/**
 * REST controller for managing Actor.
 */
@RestController
@RequestMapping("/api")
public class ActorResource {

    private static final String ENTITY_NAME = "actor";

    private final Logger log = LoggerFactory.getLogger(ActorResource.class);

    private final ActorService actorService;

    public ActorResource(ActorService actorService) {
        this.actorService = actorService;
    }

    /**
     * POST  /actors : Create a new actor.
     *
     * @param actor the actor to create
     * @return the ResponseEntity with status 201 (Created) and with body the new actor, or with status 400 (Bad Request) if the actor has already an ID
     * @throws URISyntaxException if the Location URI syntax is incorrect
     */
    @PostMapping("/actors")
    @Timed
    public ResponseEntity<Actor> createActor(@RequestBody Actor actor) throws URISyntaxException {
        log.debug("REST request to save Actor : {}", actor);
        if (actor.getId() != null) {
            throw new BadRequestAlertException("A new actor cannot already have an ID", ENTITY_NAME, "idexists");
        }
        Actor savedActor = actorService.save(actor);
        return ResponseEntity.created(new URI("/api/actors/" + savedActor.getId()))
            .headers(HeaderUtil.createEntityCreationAlert(ENTITY_NAME, savedActor.getId().toString()))
            .body(savedActor);
    }

    /**
     * PUT  /actors : Updates an existing actor.
     *
     * @param actor the actor to update
     * @return the ResponseEntity with status 200 (OK) and with body the updated actor,
     * or with status 400 (Bad Request) if the actor is not valid,
     * or with status 500 (Internal Server Error) if the actor couldn't be updated
     * @throws URISyntaxException if the Location URI syntax is incorrect
     */
    @PutMapping("/actors")
    @Timed
    public ResponseEntity<Actor> updateActor(@RequestBody Actor actor) throws URISyntaxException {
        log.debug("REST request to update Actor : {}", actor);
        if (actor.getId() == null) {
            throw new BadRequestAlertException("Invalid id", ENTITY_NAME, "idnull");
        }
        Actor updatedActor = actorService.save(actor);
        return ResponseEntity.ok()
            .headers(HeaderUtil.createEntityUpdateAlert(ENTITY_NAME, actor.getId().toString()))
            .body(updatedActor);
    }

    /**
     * GET  /actors : get all the actors.
     *
     * @param pageable the pagination information
     * @return the ResponseEntity with status 200 (OK) and the list of actors in body
     */
    @GetMapping("/actors")
    @Timed
    public ResponseEntity<List<Actor>> getAllActors(Pageable pageable) {
        log.debug("REST request to get a page of Actors");
        Page<Actor> actorPage = actorService.findAll(pageable);
        HttpHeaders paginationHeaders = PaginationUtil.generatePaginationHttpHeaders(actorPage, "/api/actors");
        return ResponseEntity.ok().headers(paginationHeaders).body(actorPage.getContent());
    }

    /**
     * GET  /actors/:id : get the "id" actor.
     *
     * @param id the id of the actor to retrieve
     * @return the ResponseEntity with status 200 (OK) and with body the actor, or with status 404 (Not Found)
     */
    @GetMapping("/actors/{id}")
    @Timed
    public ResponseEntity<Actor> getActor(@PathVariable Long id) {
        log.debug("REST request to get Actor : {}", id);
        return ResponseUtil.wrapOrNotFound(actorService.findOne(id));
    }

    /**
     * DELETE  /actors/:id : delete the "id" actor.
     *
     * @param id the id of the actor to delete
     * @return the ResponseEntity with status 200 (OK)
     */
    @DeleteMapping("/actors/{id}")
    @Timed
    public ResponseEntity<Void> deleteActor(@PathVariable Long id) {
        log.debug("REST request to delete Actor : {}", id);
        actorService.delete(id);
        return ResponseEntity.ok()
            .headers(HeaderUtil.createEntityDeletionAlert(ENTITY_NAME, id.toString()))
            .build();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/AuditResource.java | fullstack/src/main/java/org/ehcache/sample/web/rest/AuditResource.java | package org.ehcache.sample.web.rest;
import org.ehcache.sample.service.AuditEventService;
import org.ehcache.sample.web.rest.util.PaginationUtil;
import io.github.jhipster.web.util.ResponseUtil;
import org.springframework.boot.actuate.audit.AuditEvent;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.List;
/**
 * REST controller for getting the audit events.
 */
@RestController
@RequestMapping("/management/audits")
public class AuditResource {

    private final AuditEventService auditEventService;

    public AuditResource(AuditEventService auditEventService) {
        this.auditEventService = auditEventService;
    }

    /**
     * GET /audits : get a page of AuditEvents.
     *
     * @param pageable the pagination information
     * @return the ResponseEntity with status 200 (OK) and the list of AuditEvents in body
     */
    @GetMapping
    public ResponseEntity<List<AuditEvent>> getAll(Pageable pageable) {
        Page<AuditEvent> auditPage = auditEventService.findAll(pageable);
        HttpHeaders paginationHeaders = PaginationUtil.generatePaginationHttpHeaders(auditPage, "/management/audits");
        return new ResponseEntity<>(auditPage.getContent(), paginationHeaders, HttpStatus.OK);
    }

    /**
     * GET /audits : get a page of AuditEvents between the fromDate and toDate.
     *
     * @param fromDate the start of the time period of AuditEvents to get (inclusive)
     * @param toDate   the end of the time period of AuditEvents to get (inclusive — the whole day is included)
     * @param pageable the pagination information
     * @return the ResponseEntity with status 200 (OK) and the list of AuditEvents in body
     */
    @GetMapping(params = {"fromDate", "toDate"})
    public ResponseEntity<List<AuditEvent>> getByDates(
        @RequestParam(value = "fromDate") LocalDate fromDate,
        @RequestParam(value = "toDate") LocalDate toDate,
        Pageable pageable) {
        // toDate is pushed forward one day so events on that day itself are included.
        Page<AuditEvent> auditPage = auditEventService.findByDates(
            fromDate.atStartOfDay(ZoneId.systemDefault()).toInstant(),
            toDate.atStartOfDay(ZoneId.systemDefault()).plusDays(1).toInstant(),
            pageable);
        HttpHeaders paginationHeaders = PaginationUtil.generatePaginationHttpHeaders(auditPage, "/management/audits");
        return new ResponseEntity<>(auditPage.getContent(), paginationHeaders, HttpStatus.OK);
    }

    /**
     * GET /audits/:id : get an AuditEvent by id.
     *
     * @param id the id of the entity to get
     * @return the ResponseEntity with status 200 (OK) and the AuditEvent in body, or status 404 (Not Found)
     */
    @GetMapping("/{id:.+}")
    public ResponseEntity<AuditEvent> get(@PathVariable Long id) {
        return ResponseUtil.wrapOrNotFound(auditEventService.find(id));
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/util/HeaderUtil.java | fullstack/src/main/java/org/ehcache/sample/web/rest/util/HeaderUtil.java | package org.ehcache.sample.web.rest.util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpHeaders;
/**
 * Utility class for HTTP headers creation.
 * All alert headers share the {@code X-demoApp-*} prefix consumed by the front-end.
 */
public final class HeaderUtil {

    private static final Logger log = LoggerFactory.getLogger(HeaderUtil.class);

    private static final String APPLICATION_NAME = "demoApp";

    private HeaderUtil() {
    }

    /** Builds the full header name for the given suffix, e.g. "alert" -> "X-demoApp-alert". */
    private static String headerName(String suffix) {
        return "X-" + APPLICATION_NAME + "-" + suffix;
    }

    public static HttpHeaders createAlert(String message, String param) {
        HttpHeaders headers = new HttpHeaders();
        headers.add(headerName("alert"), message);
        headers.add(headerName("params"), param);
        return headers;
    }

    public static HttpHeaders createEntityCreationAlert(String entityName, String param) {
        return createAlert("A new " + entityName + " is created with identifier " + param, param);
    }

    public static HttpHeaders createEntityUpdateAlert(String entityName, String param) {
        return createAlert("A " + entityName + " is updated with identifier " + param, param);
    }

    public static HttpHeaders createEntityDeletionAlert(String entityName, String param) {
        return createAlert("A " + entityName + " is deleted with identifier " + param, param);
    }

    // NOTE(review): errorKey is accepted but not used here — presumably kept for signature
    // parity with the i18n-enabled variant of this JHipster helper; confirm before removing.
    public static HttpHeaders createFailureAlert(String entityName, String errorKey, String defaultMessage) {
        log.error("Entity processing failed, {}", defaultMessage);
        HttpHeaders headers = new HttpHeaders();
        headers.add(headerName("error"), defaultMessage);
        headers.add(headerName("params"), entityName);
        return headers;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/util/PaginationUtil.java | fullstack/src/main/java/org/ehcache/sample/web/rest/util/PaginationUtil.java | package org.ehcache.sample.web.rest.util;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpHeaders;
import org.springframework.web.util.UriComponentsBuilder;
/**
 * Utility class for handling pagination.
 *
 * <p>
 * Pagination uses the same principles as the <a href="https://developer.github.com/v3/#pagination">GitHub API</a>,
 * and follows <a href="http://tools.ietf.org/html/rfc5988">RFC 5988 (Link header)</a>.
 */
public final class PaginationUtil {

    private PaginationUtil() {
    }

    /**
     * Builds the X-Total-Count and Link headers for the given result page.
     *
     * @param page    the page of results being returned
     * @param baseUrl the URL path the "next"/"prev"/"last"/"first" links point to
     * @return headers carrying the total element count and the RFC 5988 Link relations
     */
    public static <T> HttpHeaders generatePaginationHttpHeaders(Page<T> page, String baseUrl) {
        HttpHeaders headers = new HttpHeaders();
        headers.add("X-Total-Count", Long.toString(page.getTotalElements()));

        int pageNumber = page.getNumber();
        int pageSize = page.getSize();
        StringBuilder link = new StringBuilder();
        // "next" only when there is a page after the current one
        if (pageNumber + 1 < page.getTotalPages()) {
            link.append('<').append(generateUri(baseUrl, pageNumber + 1, pageSize)).append(">; rel=\"next\",");
        }
        // "prev" only when not on the first page
        if (pageNumber > 0) {
            link.append('<').append(generateUri(baseUrl, pageNumber - 1, pageSize)).append(">; rel=\"prev\",");
        }
        // "last" and "first" are always present; an empty result keeps last == 0
        int lastPage = page.getTotalPages() > 0 ? page.getTotalPages() - 1 : 0;
        link.append('<').append(generateUri(baseUrl, lastPage, pageSize)).append(">; rel=\"last\",");
        link.append('<').append(generateUri(baseUrl, 0, pageSize)).append(">; rel=\"first\"");
        headers.add(HttpHeaders.LINK, link.toString());
        return headers;
    }

    /** Appends page and size query parameters to the base URL. */
    private static String generateUri(String baseUrl, int page, int size) {
        return UriComponentsBuilder.fromUriString(baseUrl).queryParam("page", page).queryParam("size", size).toUriString();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/InvalidPasswordException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/InvalidPasswordException.java | package org.ehcache.sample.web.rest.errors;
import org.zalando.problem.AbstractThrowableProblem;
import org.zalando.problem.Status;
/**
 * RFC 7807 problem returned as HTTP 400 (Bad Request) when a supplied password
 * is rejected, with problem type {@code ErrorConstants.INVALID_PASSWORD_TYPE}.
 */
public class InvalidPasswordException extends AbstractThrowableProblem {
private static final long serialVersionUID = 1L;
public InvalidPasswordException() {
super(ErrorConstants.INVALID_PASSWORD_TYPE, "Incorrect password", Status.BAD_REQUEST);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/CustomParameterizedException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/CustomParameterizedException.java | package org.ehcache.sample.web.rest.errors;
import org.zalando.problem.AbstractThrowableProblem;
import java.util.HashMap;
import java.util.Map;
import static org.zalando.problem.Status.BAD_REQUEST;
/**
 * Custom, parameterized exception, which can be translated on the client side.
 * For example:
 *
 * <pre>
 * throw new CustomParameterizedException("myCustomError", "hello", "world");
 * </pre>
 *
 * Can be translated with:
 *
 * <pre>
 * "error.myCustomError" : "The server says {{param0}} to {{param1}}"
 * </pre>
 */
public class CustomParameterizedException extends AbstractThrowableProblem {

    private static final long serialVersionUID = 1L;

    private static final String PARAM = "param";

    public CustomParameterizedException(String message, String... params) {
        this(message, toParamMap(params));
    }

    public CustomParameterizedException(String message, Map<String, Object> paramMap) {
        super(ErrorConstants.PARAMETERIZED_TYPE, "Parameterized Exception", BAD_REQUEST, null, null, null, toProblemParameters(message, paramMap));
    }

    /**
     * Turns positional values into a map keyed "param0", "param1", ... as expected
     * by the client-side translation templates.
     */
    public static Map<String, Object> toParamMap(String... params) {
        Map<String, Object> paramMap = new HashMap<>();
        if (params != null) {
            int index = 0;
            for (String value : params) {
                paramMap.put(PARAM + index, value);
                index++;
            }
        }
        return paramMap;
    }

    /** Wraps the translation key and its parameters into the problem's parameter map. */
    public static Map<String, Object> toProblemParameters(String message, Map<String, Object> paramMap) {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("message", message);
        parameters.put("params", paramMap);
        return parameters;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/package-info.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/package-info.java | /**
* Specific errors used with Zalando's "problem-spring-web" library.
*
* More information on https://github.com/zalando/problem-spring-web
*/
package org.ehcache.sample.web.rest.errors;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/LoginAlreadyUsedException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/LoginAlreadyUsedException.java | package org.ehcache.sample.web.rest.errors;
/**
 * 400 (Bad Request) alert exception thrown when a requested login name is
 * already taken; carries the "userexists" error key for the front-end.
 */
public class LoginAlreadyUsedException extends BadRequestAlertException {
private static final long serialVersionUID = 1L;
public LoginAlreadyUsedException() {
super(ErrorConstants.LOGIN_ALREADY_USED_TYPE, "Login name already used!", "userManagement", "userexists");
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/EmailNotFoundException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/EmailNotFoundException.java | package org.ehcache.sample.web.rest.errors;
import org.zalando.problem.AbstractThrowableProblem;
import org.zalando.problem.Status;
/**
 * RFC 7807 problem returned as HTTP 400 (Bad Request) when an email address
 * is not registered, with problem type {@code ErrorConstants.EMAIL_NOT_FOUND_TYPE}.
 */
public class EmailNotFoundException extends AbstractThrowableProblem {
private static final long serialVersionUID = 1L;
public EmailNotFoundException() {
super(ErrorConstants.EMAIL_NOT_FOUND_TYPE, "Email address not registered", Status.BAD_REQUEST);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/InternalServerErrorException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/InternalServerErrorException.java | package org.ehcache.sample.web.rest.errors;
import org.zalando.problem.AbstractThrowableProblem;
import org.zalando.problem.Status;
/**
 * Simple exception with a message, that returns an Internal Server Error (500) code,
 * using the default RFC 7807 problem type.
 */
public class InternalServerErrorException extends AbstractThrowableProblem {
private static final long serialVersionUID = 1L;
// message becomes the problem "title" seen by the client
public InternalServerErrorException(String message) {
super(ErrorConstants.DEFAULT_TYPE, message, Status.INTERNAL_SERVER_ERROR);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/ErrorConstants.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/ErrorConstants.java | package org.ehcache.sample.web.rest.errors;
import java.net.URI;
/**
 * Error message keys and RFC 7807 problem type URIs shared by the REST layer.
 */
public final class ErrorConstants {

    public static final String ERR_CONCURRENCY_FAILURE = "error.concurrencyFailure";
    public static final String ERR_VALIDATION = "error.validation";

    public static final String PROBLEM_BASE_URL = "https://www.jhipster.tech/problem";
    public static final URI DEFAULT_TYPE = problemType("/problem-with-message");
    public static final URI CONSTRAINT_VIOLATION_TYPE = problemType("/constraint-violation");
    public static final URI PARAMETERIZED_TYPE = problemType("/parameterized");
    public static final URI ENTITY_NOT_FOUND_TYPE = problemType("/entity-not-found");
    public static final URI INVALID_PASSWORD_TYPE = problemType("/invalid-password");
    public static final URI EMAIL_ALREADY_USED_TYPE = problemType("/email-already-used");
    public static final URI LOGIN_ALREADY_USED_TYPE = problemType("/login-already-used");
    public static final URI EMAIL_NOT_FOUND_TYPE = problemType("/email-not-found");

    private ErrorConstants() {
    }

    /** Resolves a problem-type path against the shared base URL. */
    private static URI problemType(String path) {
        return URI.create(PROBLEM_BASE_URL + path);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/FieldErrorVM.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/FieldErrorVM.java | package org.ehcache.sample.web.rest.errors;
import java.io.Serializable;
/**
 * View Model describing a single field-validation failure: which object,
 * which field, and the associated message/code.
 */
public class FieldErrorVM implements Serializable {

    private static final long serialVersionUID = 1L;

    private final String objectName;
    private final String field;
    private final String message;

    public FieldErrorVM(String dto, String field, String message) {
        this.objectName = dto;
        this.field = field;
        this.message = message;
    }

    public String getObjectName() {
        return objectName;
    }

    public String getField() {
        return field;
    }

    public String getMessage() {
        return message;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/EmailAlreadyUsedException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/EmailAlreadyUsedException.java | package org.ehcache.sample.web.rest.errors;
/**
 * 400 (Bad Request) alert exception thrown when a requested email address is
 * already registered; carries the "emailexists" error key for the front-end.
 */
public class EmailAlreadyUsedException extends BadRequestAlertException {
private static final long serialVersionUID = 1L;
public EmailAlreadyUsedException() {
super(ErrorConstants.EMAIL_ALREADY_USED_TYPE, "Email is already in use!", "userManagement", "emailexists");
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/BadRequestAlertException.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/BadRequestAlertException.java | package org.ehcache.sample.web.rest.errors;
import org.zalando.problem.AbstractThrowableProblem;
import org.zalando.problem.Status;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
/**
 * Base RFC 7807 problem for 400 (Bad Request) responses that also carry an
 * entity name and error key, so the client can show a translated alert
 * ("error.&lt;errorKey&gt;" with the entity name as parameter).
 */
public class BadRequestAlertException extends AbstractThrowableProblem {

    private static final long serialVersionUID = 1L;

    private final String entityName;
    private final String errorKey;

    public BadRequestAlertException(String defaultMessage, String entityName, String errorKey) {
        this(ErrorConstants.DEFAULT_TYPE, defaultMessage, entityName, errorKey);
    }

    public BadRequestAlertException(URI type, String defaultMessage, String entityName, String errorKey) {
        super(type, defaultMessage, Status.BAD_REQUEST, null, null, null, getAlertParameters(entityName, errorKey));
        this.entityName = entityName;
        this.errorKey = errorKey;
    }

    /** Builds the "message"/"params" entries the front-end uses for translation. */
    private static Map<String, Object> getAlertParameters(String entityName, String errorKey) {
        Map<String, Object> alertParams = new HashMap<>();
        alertParams.put("message", "error." + errorKey);
        alertParams.put("params", entityName);
        return alertParams;
    }

    public String getEntityName() {
        return entityName;
    }

    public String getErrorKey() {
        return errorKey;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/errors/ExceptionTranslator.java | fullstack/src/main/java/org/ehcache/sample/web/rest/errors/ExceptionTranslator.java | package org.ehcache.sample.web.rest.errors;
import org.ehcache.sample.web.rest.util.HeaderUtil;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.context.request.NativeWebRequest;
import org.zalando.problem.DefaultProblem;
import org.zalando.problem.Problem;
import org.zalando.problem.ProblemBuilder;
import org.zalando.problem.Status;
import org.zalando.problem.spring.web.advice.ProblemHandling;
import org.zalando.problem.violations.ConstraintViolationProblem;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.stream.Collectors;
/**
 * Controller advice to translate the server side exceptions to client-friendly json structures.
 * The error response follows RFC7807 - Problem Details for HTTP APIs (https://tools.ietf.org/html/rfc7807)
 */
@ControllerAdvice
public class ExceptionTranslator implements ProblemHandling {
/**
 * Post-process the Problem payload to add the message key for the front-end if needed.
 * Only ConstraintViolationProblem and DefaultProblem instances are rewritten; any
 * other Problem subtype passes through untouched.
 */
@Override
public ResponseEntity<Problem> process(@Nullable ResponseEntity<Problem> entity, NativeWebRequest request) {
if (entity == null) {
return entity;
}
Problem problem = entity.getBody();
if (!(problem instanceof ConstraintViolationProblem || problem instanceof DefaultProblem)) {
return entity;
}
// Rebuild the problem so the zalando default type is replaced by this app's
// default type, and the request path is attached for the client.
ProblemBuilder builder = Problem.builder()
.withType(Problem.DEFAULT_TYPE.equals(problem.getType()) ? ErrorConstants.DEFAULT_TYPE : problem.getType())
.withStatus(problem.getStatus())
.withTitle(problem.getTitle())
.with("path", request.getNativeRequest(HttpServletRequest.class).getRequestURI());
if (problem instanceof ConstraintViolationProblem) {
// Validation failures: expose the violations plus the generic validation key.
builder
.with("violations", ((ConstraintViolationProblem) problem).getViolations())
.with("message", ErrorConstants.ERR_VALIDATION);
} else {
builder
.withCause(((DefaultProblem) problem).getCause())
.withDetail(problem.getDetail())
.withInstance(problem.getInstance());
problem.getParameters().forEach(builder::with);
// Fall back to a generic "error.http.<status>" translation key when the
// problem did not set one itself.
if (!problem.getParameters().containsKey("message") && problem.getStatus() != null) {
builder.with("message", "error.http." + problem.getStatus().getStatusCode());
}
}
return new ResponseEntity<>(builder.build(), entity.getHeaders(), entity.getStatusCode());
}
/**
 * Translates bean-validation failures on @Valid request bodies into a
 * constraint-violation problem listing each offending field.
 */
@Override
public ResponseEntity<Problem> handleMethodArgumentNotValid(MethodArgumentNotValidException ex, @Nonnull NativeWebRequest request) {
BindingResult result = ex.getBindingResult();
List<FieldErrorVM> fieldErrors = result.getFieldErrors().stream()
.map(f -> new FieldErrorVM(f.getObjectName(), f.getField(), f.getCode()))
.collect(Collectors.toList());
Problem problem = Problem.builder()
.withType(ErrorConstants.CONSTRAINT_VIOLATION_TYPE)
.withTitle("Method argument not valid")
.withStatus(defaultConstraintViolationStatus())
.with("message", ErrorConstants.ERR_VALIDATION)
.with("fieldErrors", fieldErrors)
.build();
return create(ex, problem, request);
}
// Maps a missing entity to 404. NOTE(review): "message" is set to the
// ENTITY_NOT_FOUND_TYPE URI rather than a translation key — looks like the
// stock JHipster behavior, but confirm the front-end expects a URI here.
@ExceptionHandler
public ResponseEntity<Problem> handleNoSuchElementException(NoSuchElementException ex, NativeWebRequest request) {
Problem problem = Problem.builder()
.withStatus(Status.NOT_FOUND)
.with("message", ErrorConstants.ENTITY_NOT_FOUND_TYPE)
.build();
return create(ex, problem, request);
}
// BadRequestAlertException already carries its own problem payload; only the
// X-*-error/params failure headers are added here.
@ExceptionHandler
public ResponseEntity<Problem> handleBadRequestAlertException(BadRequestAlertException ex, NativeWebRequest request) {
return create(ex, request, HeaderUtil.createFailureAlert(ex.getEntityName(), ex.getErrorKey(), ex.getMessage()));
}
// Optimistic-locking / concurrent-modification failures surface as 409 Conflict.
@ExceptionHandler
public ResponseEntity<Problem> handleConcurrencyFailure(ConcurrencyFailureException ex, NativeWebRequest request) {
Problem problem = Problem.builder()
.withStatus(Status.CONFLICT)
.with("message", ErrorConstants.ERR_CONCURRENCY_FAILURE)
.build();
return create(ex, problem, request);
}
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/vm/package-info.java | fullstack/src/main/java/org/ehcache/sample/web/rest/vm/package-info.java | /**
* View Models used by Spring MVC REST controllers.
*/
package org.ehcache.sample.web.rest.vm;
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/vm/LoggerVM.java | fullstack/src/main/java/org/ehcache/sample/web/rest/vm/LoggerVM.java | package org.ehcache.sample.web.rest.vm;
import ch.qos.logback.classic.Logger;
/**
 * View Model object for storing a Logback logger.
 */
public class LoggerVM {

    private String name;

    private String level;

    /** Snapshots the given logger's name and effective (possibly inherited) level. */
    public LoggerVM(Logger logger) {
        this.name = logger.getName();
        this.level = logger.getEffectiveLevel().toString();
    }

    public LoggerVM() {
        // Empty public constructor used by Jackson.
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getLevel() {
        return this.level;
    }

    public void setLevel(String level) {
        this.level = level;
    }

    @Override
    public String toString() {
        // Renders as: LoggerVM{name='…', level='…'}
        return "LoggerVM{name='" + this.name + "', level='" + this.level + "'}";
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/vm/KeyAndPasswordVM.java | fullstack/src/main/java/org/ehcache/sample/web/rest/vm/KeyAndPasswordVM.java | package org.ehcache.sample.web.rest.vm;
/**
* View Model object for storing the user's key and password.
*/
public class KeyAndPasswordVM {

    // Key identifying the reset/activation request (see class doc).
    private String key;

    // Replacement password chosen by the user.
    private String newPassword;

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public String getNewPassword() {
        return newPassword;
    }

    public void setNewPassword(String newPassword) {
        this.newPassword = newPassword;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/vm/ManagedUserVM.java | fullstack/src/main/java/org/ehcache/sample/web/rest/vm/ManagedUserVM.java | package org.ehcache.sample.web.rest.vm;
import org.ehcache.sample.service.dto.UserDTO;
import javax.validation.constraints.Size;
/**
* View Model extending the UserDTO, which is meant to be used in the user management UI.
*/
public class ManagedUserVM extends UserDTO {

    public static final int PASSWORD_MIN_LENGTH = 4;

    public static final int PASSWORD_MAX_LENGTH = 100;

    // Clear-text password submitted from the admin UI; length-checked by Bean Validation.
    @Size(min = PASSWORD_MIN_LENGTH, max = PASSWORD_MAX_LENGTH)
    private String password;

    public ManagedUserVM() {
        // Empty constructor needed for Jackson.
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    @Override
    public String toString() {
        // Deliberately omits the password; the rest is delegated to UserDTO.
        return "ManagedUserVM{" +
            "} " + super.toString();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/fullstack/src/main/java/org/ehcache/sample/web/rest/vm/LoginVM.java | fullstack/src/main/java/org/ehcache/sample/web/rest/vm/LoginVM.java | package org.ehcache.sample.web.rest.vm;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
/**
* View Model object for storing a user's credentials.
*/
public class LoginVM {

    @NotNull
    @Size(min = 1, max = 50)
    private String username;

    // Same length bounds as the user-management view model so both paths agree.
    @NotNull
    @Size(min = ManagedUserVM.PASSWORD_MIN_LENGTH, max = ManagedUserVM.PASSWORD_MAX_LENGTH)
    private String password;

    // Boxed Boolean so an absent JSON field is distinguishable from false.
    private Boolean rememberMe;

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public Boolean isRememberMe() {
        return rememberMe;
    }

    public void setRememberMe(Boolean rememberMe) {
        this.rememberMe = rememberMe;
    }

    @Override
    public String toString() {
        // The password is intentionally excluded from log output.
        return "LoginVM{" +
            "username='" + username + '\'' +
            ", rememberMe=" + rememberMe +
            '}';
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/basic/src/main/java/org/ehcache/sample/BasicXML.java | basic/src/main/java/org/ehcache/sample/BasicXML.java | package org.ehcache.sample;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.config.Configuration;
import org.ehcache.xml.XmlConfiguration;
import org.slf4j.Logger;
import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManager;
import static org.slf4j.LoggerFactory.getLogger;
public class BasicXML {

    private static final Logger LOGGER = getLogger(BasicXML.class);

    public static void main(String[] args) {
        LOGGER.info("Creating cache manager via XML resource");
        // Parse /ehcache.xml from the classpath into a configuration object.
        Configuration configuration = new XmlConfiguration(BasicXML.class.getResource("/ehcache.xml"));
        try (CacheManager cacheManager = newCacheManager(configuration)) {
            // A manager created from a Configuration starts uninitialized, unlike build(true).
            cacheManager.init();
            Cache<Long, String> cache = cacheManager.getCache("basicCache", Long.class, String.class);
            LOGGER.info("Putting to cache");
            cache.put(1L, "da one!");
            String retrieved = cache.get(1L);
            LOGGER.info("Retrieved '{}'", retrieved);
            LOGGER.info("Closing cache manager");
        }
        LOGGER.info("Exiting");
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/basic/src/main/java/org/ehcache/sample/BasicProgrammatic.java | basic/src/main/java/org/ehcache/sample/BasicProgrammatic.java | package org.ehcache.sample;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.slf4j.Logger;
import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder;
import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder;
import static org.ehcache.config.builders.ResourcePoolsBuilder.heap;
import static org.ehcache.config.units.MemoryUnit.MB;
import static org.slf4j.LoggerFactory.getLogger;
public class BasicProgrammatic {

    private static final Logger LOGGER = getLogger(BasicProgrammatic.class);

    public static void main(String[] args) {
        LOGGER.info("Creating cache manager programmatically");
        // 100 on-heap entries backed by a 1 MB offheap tier; build(true) also initializes.
        try (CacheManager cacheManager = newCacheManagerBuilder()
            .withCache("basicCache",
                newCacheConfigurationBuilder(Long.class, String.class, heap(100).offheap(1, MB)))
            .build(true)) {
            Cache<Long, String> cache = cacheManager.getCache("basicCache", Long.class, String.class);
            LOGGER.info("Putting to cache");
            cache.put(1L, "da one!");
            String retrieved = cache.get(1L);
            LOGGER.info("Retrieved '{}'", retrieved);
            LOGGER.info("Closing cache manager");
        }
        LOGGER.info("Exiting");
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/Main.java | scale-continuum/src/main/java/org/terracotta/sample/Main.java | package org.terracotta.sample;
import io.rainfall.statistics.StatisticsPeekHolder;
import org.terracotta.sample.collector.Config;
import org.terracotta.sample.collector.Entry;
import org.terracotta.sample.collector.PerformanceMetricsCollector;
import org.terracotta.sample.collector.QueueReporter;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
import static spark.Spark.exception;
import static spark.Spark.get;
import static spark.Spark.post;
import static spark.Spark.staticFileLocation;
/**
* @author Aurelien Broszniowski
*/
public class Main {

    public static void main(String[] args) {
        // Holds the Future of the currently running load test; null while idle.
        AtomicReference<Future<StatisticsPeekHolder>> futureRef = new AtomicReference<>();
        PerformanceMetricsCollector metricsCollector = new PerformanceMetricsCollector();
        // Serve the web UI from the classpath's /public directory.
        staticFileLocation("/public");
        // Starts a run; the request body is a JSON array of {name, value} form entries.
        post("/api/start", (request, response) -> {
            String body = request.body();
            GsonBuilder builder = new GsonBuilder();
            Gson gson = builder.create();
            Entry[] entries = gson.fromJson(body, Entry[].class);
            if (futureRef.get() != null) {
                throw new RuntimeException("job already is in progress");
            }
            // NOTE(review): the scenario is started BEFORE the compareAndSet below, so if two
            // /api/start requests race past the null check, the loser's scenario keeps running
            // with no reference to cancel it — confirm whether concurrent starts can occur here.
            Future<StatisticsPeekHolder> f = metricsCollector.start(new Config(entries));
            if (!futureRef.compareAndSet(null, f)) {
                throw new RuntimeException("job already is in progress");
            }
            return "Started.";
        });
        // Blocks until the current run finishes, then clears the reference.
        get("/api/waitUntilDone", (request, response) -> {
            Future<StatisticsPeekHolder> future = futureRef.get();
            if (future == null) {
                throw new RuntimeException("no job started");
            }
            future.get();
            futureRef.set(null);
            return "Done.";
        });
        // Cancels the current run; fails when nothing is running.
        get("/api/cancel", (request, response) -> {
            Future<StatisticsPeekHolder> future = futureRef.get();
            if (future == null) {
                throw new RuntimeException("no job started");
            }
            future.cancel(true);
            futureRef.set(null);
            return "Done.";
        });
        // Same as /api/cancel but a silent no-op when nothing is running.
        get("/api/cancelNoFail", (request, response) -> {
            Future<StatisticsPeekHolder> future = futureRef.get();
            if (future != null) {
                future.cancel(true);
                futureRef.set(null);
            }
            return "Done.";
        });
        // Drains all currently queued samples and returns them as a JSON array.
        get("/api/stats", (request, response) -> {
            response.type("application/json");
            List<QueueReporter.Result> data = new ArrayList<>();
            while (metricsCollector.isRunning()) {
                QueueReporter.Result result = metricsCollector.pollStats();
                if (result == null) {
                    break;
                }
                data.add(result);
            }
            if (data.isEmpty() && !metricsCollector.isRunning()) {
                // marker value to signify the end of the data
                data.add(new QueueReporter.Result(-1));
            }
            GsonBuilder builder = new GsonBuilder();
            Gson gson = builder.create();
            return gson.toJson(data);
        });
        // Any handler exception becomes a 500 whose body is the full stack trace.
        exception(Exception.class, (exception, request, response) -> {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            exception.printStackTrace(pw);
            pw.close();
            String stackTrace = sw.toString();
            response.status(500);
            response.body(stackTrace);
        });
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/collector/Entry.java | scale-continuum/src/main/java/org/terracotta/sample/collector/Entry.java | package org.terracotta.sample.collector;
/**
 * A single name/value pair submitted from the web form.
 *
 * @author Aurelien Broszniowski
 */
public class Entry {

    // Form field name, e.g. "datasetCount".
    private String name;

    // Raw string value entered for that field.
    private String value;

    /** No-arg constructor required for JSON deserialization. */
    public Entry() {
    }

    public String getName() {
        return this.name;
    }

    public void setName(String newName) {
        this.name = newName;
    }

    public String getValue() {
        return this.value;
    }

    public void setValue(String newValue) {
        this.value = newValue;
    }

    @Override
    public String toString() {
        return this.name + "->" + this.value;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/collector/Config.java | scale-continuum/src/main/java/org/terracotta/sample/collector/Config.java | package org.terracotta.sample.collector;
import org.ehcache.impl.internal.store.offheap.MemorySizeParser;
/**
 * Parsed form of the UI settings posted to /api/start.
 *
 * @author Aurelien Broszniowski
 */
public class Config {

    private final long datasetCount;
    private final boolean cacheEnabled;
    private final Long heapSizeCount;      // null when the heap tier is disabled
    private final Long offheapSizeCount;   // null when the offheap tier is disabled
    private final String terracottaUrl;    // null when clustering is disabled
    private final int clientCount;
    private final long valueSizeInBytes;

    public Config(Entry[] entries) {
        this.datasetCount = Long.parseLong(findEntry(entries, "datasetCount"));
        // Accepts human-readable sizes via Ehcache's memory-size parser.
        this.valueSizeInBytes = MemorySizeParser.parse(findEntry(entries, "valueSize"));
        this.clientCount = Integer.parseInt(findEntry(entries, "clientCount", "16"));
        this.cacheEnabled = switchedOn(entries, "cacheEnabled");
        this.heapSizeCount = switchedOn(entries, "heapEnabled") ? Long.valueOf(findEntry(entries, "heapSize")) : null;
        this.offheapSizeCount = switchedOn(entries, "offheapEnabled") ? Long.valueOf(findEntry(entries, "offheapSize")) : null;
        // NB: this toggle's name really is capitalised differently from the others.
        this.terracottaUrl = switchedOn(entries, "TerracottaEnabled") ? findEntry(entries, "terracottaUrl") : null;
    }

    // The UI posts the literal string "on" for an enabled toggle.
    private boolean switchedOn(Entry[] entries, String name) {
        return "on".equalsIgnoreCase(findEntry(entries, name));
    }

    public long getDatasetCount() {
        return datasetCount;
    }

    public boolean isCacheEnabled() {
        return cacheEnabled;
    }

    public Long getHeapSizeCount() {
        return heapSizeCount;
    }

    public Long getOffheapSizeCount() {
        return offheapSizeCount;
    }

    public String getTerracottaUrl() {
        return terracottaUrl;
    }

    public int getClientCount() {
        return clientCount;
    }

    public long getValueSizeInBytes() {
        return valueSizeInBytes;
    }

    private String findEntry(Entry[] entries, String name) {
        return findEntry(entries, name, null);
    }

    /** Returns the value of the first entry named {@code name}, or {@code defaultValue}. */
    private String findEntry(Entry[] entries, String name, String defaultValue) {
        for (int i = 0; i < entries.length; i++) {
            if (entries[i].getName().equals(name)) {
                return entries[i].getValue();
            }
        }
        return defaultValue;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/collector/Ehcache3Stats.java | scale-continuum/src/main/java/org/terracotta/sample/collector/Ehcache3Stats.java | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.sample.collector;
import org.ehcache.Cache;
import org.terracotta.context.ContextManager;
import org.terracotta.context.TreeNode;
import org.terracotta.context.query.Matcher;
import org.terracotta.context.query.Matchers;
import org.terracotta.context.query.Query;
import org.terracotta.statistics.OperationStatistic;
import org.terracotta.statistics.ValueStatistic;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static org.terracotta.context.query.Matchers.attributes;
import static org.terracotta.context.query.Matchers.context;
import static org.terracotta.context.query.Matchers.hasAttribute;
import static org.terracotta.context.query.Matchers.identifier;
import static org.terracotta.context.query.Matchers.subclassOf;
import static org.terracotta.context.query.Queries.self;
import static org.terracotta.context.query.QueryBuilder.queryBuilder;
/**
 * Helpers that dig Ehcache 3 statistics out of the Terracotta context tree.
 *
 * @author Ludovic Orban
 */
class Ehcache3Stats {

    public static OperationStatistic findOperationStat(Cache<?, ?> cache1, final String statName, final String tag) {
        // Collect every OperationStatistic node reachable from the cache's context node.
        Query descendantQuery = queryBuilder()
            .descendants().filter(context(identifier(subclassOf(OperationStatistic.class)))).build();
        Set<TreeNode> candidates = descendantQuery.execute(Collections.singleton(ContextManager.nodeFor(cache1)));
        TreeNode node = uniqueMatch(candidates, statName, tag);
        return (OperationStatistic) node.getContext().attributes().get("this");
    }

    public static ValueStatistic findValueStat(Cache<?, ?> cache1, final String statName, final String tag) {
        // Same idea, but the search is chained from the node itself and targets ValueStatistic.
        Query descendantQuery = queryBuilder().chain(self())
            .descendants().filter(context(identifier(subclassOf(ValueStatistic.class)))).build();
        Set<TreeNode> candidates = descendantQuery.execute(Collections.singleton(ContextManager.nodeFor(cache1)));
        TreeNode node = uniqueMatch(candidates, statName, tag);
        return (ValueStatistic) node.getContext().attributes().get("this");
    }

    /**
     * Filters {@code candidates} down to the single node whose "name" attribute equals
     * {@code statName} and whose "tags" attribute contains {@code tag}.
     *
     * @throws RuntimeException when zero or several nodes match
     */
    private static TreeNode uniqueMatch(Set<TreeNode> candidates, final String statName, final String tag) {
        Set<TreeNode> result = queryBuilder()
            .filter(
                context(attributes(Matchers.<Map<String, Object>>allOf(
                    hasAttribute("name", statName), hasAttribute("tags", new Matcher<Set<String>>() {
                        @Override
                        protected boolean matchesSafely(Set<String> object) {
                            return object.contains(tag);
                        }
                    }))))).build().execute(candidates);
        if (result.size() != 1) {
            throw new RuntimeException("single stat not found; found " + result.size());
        }
        return result.iterator().next();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/collector/QueueReporter.java | scale-continuum/src/main/java/org/terracotta/sample/collector/QueueReporter.java | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.sample.collector;
import io.rainfall.reporting.Reporter;
import io.rainfall.statistics.StatisticsHolder;
import io.rainfall.statistics.StatisticsPeek;
import io.rainfall.statistics.StatisticsPeekHolder;
import org.ehcache.Cache;
import java.util.List;
import java.util.Queue;
import static org.terracotta.sample.collector.Ehcache3Stats.findValueStat;
import static org.terracotta.sample.collector.PerformanceMetricsCollector.DaoResult.LOAD;
import static org.terracotta.sample.collector.PerformanceMetricsCollector.OPERATION_NAME;
/**
 * Rainfall {@link Reporter} that converts each periodic statistics peek into a
 * {@link Result} sample and queues it for the /api/stats endpoint to drain.
 *
 * @author Ludovic Orban
 */
public class QueueReporter extends Reporter<PerformanceMetricsCollector.DaoResult> {

    /**
     * One periodic sample. A timestamp-only instance (all other fields null) is used by
     * the web layer as an end-of-data marker.
     */
    public static class Result {

        private final long timestamp;
        private final Long periodicTps;
        private final Double periodicAverageLatencyInMs;
        private final Number onHeapCount;
        private final Number offHeapSizeInMegabytes;

        public Result(long timestamp) {
            this(timestamp, null, null, null, null);
        }

        public Result(long timestamp, Long periodicTps, Double periodicAverageLatencyInMs, Number onHeapCount, Number offHeapSizeInMegabytes) {
            this.timestamp = timestamp;
            this.periodicTps = periodicTps;
            this.periodicAverageLatencyInMs = periodicAverageLatencyInMs;
            this.onHeapCount = onHeapCount;
            this.offHeapSizeInMegabytes = offHeapSizeInMegabytes;
        }

        public long getTimestamp() {
            return timestamp;
        }

        public Long getPeriodicTps() {
            return periodicTps;
        }

        public Double getPeriodicAverageLatencyInMs() {
            return periodicAverageLatencyInMs;
        }

        public Number getOnHeapCount() {
            return onHeapCount;
        }

        public Number getOffHeapSizeInMegabytes() {
            return offHeapSizeInMegabytes;
        }
    }

    private final Queue<Result> resultQueue;
    private final Cache<?, ?> cache; // null when the run is uncached

    public QueueReporter(Queue<Result> resultQueue, Cache<?, ?> cache) {
        this.resultQueue = resultQueue;
        this.cache = cache;
    }

    @Override
    public void header(List<String> list) {
        // Samples go to a queue, not a tabular report: no header needed.
    }

    @Override
    public void report(StatisticsPeekHolder<PerformanceMetricsCollector.DaoResult> statisticsPeekHolder) {
        StatisticsPeek<PerformanceMetricsCollector.DaoResult> statisticsPeek = statisticsPeekHolder.getStatisticsPeeks(OPERATION_NAME);
        Double periodicAverageLatencyInMs = statisticsPeek.getPeriodicAverageLatencyInMs(LOAD);
        Long periodicTps = statisticsPeek.getPeriodicTps(LOAD);
        long timestamp = statisticsPeek.getTimestamp();
        Number onHeapCount = null;
        Number offHeapSizeInMegabytes = null;
        if (cache != null) {
            // Each tier's statistic only exists when that tier is configured, hence the per-stat catch.
            try {
                onHeapCount = (Number) findValueStat(cache, "mappingsCount", "onheap-store").value();
            } catch (Exception e) { /* ignore, not present */ }
            try {
                offHeapSizeInMegabytes = ((Long) findValueStat(cache, "occupiedMemory", "local-offheap").value()) / 1024 / 1024;
            } catch (Exception e) { /* ignore, not present */ }
        }
        // Skip empty periods. The added null check prevents a NullPointerException from
        // auto-unboxing when the peek has no latency sample for this period (boxed Double).
        if (periodicAverageLatencyInMs == null || periodicAverageLatencyInMs.isNaN()) {
            return;
        }
        resultQueue.offer(new Result(timestamp, periodicTps, periodicAverageLatencyInMs, onHeapCount, offHeapSizeInMegabytes));
    }

    @Override
    public void summarize(StatisticsHolder<PerformanceMetricsCollector.DaoResult> statisticsHolder) {
        // Nothing to summarize; consumers read the live queue instead.
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/collector/PerformanceMetricsCollector.java | scale-continuum/src/main/java/org/terracotta/sample/collector/PerformanceMetricsCollector.java | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.sample.collector;
import io.rainfall.AssertionEvaluator;
import io.rainfall.Configuration;
import io.rainfall.ObjectGenerator;
import io.rainfall.Operation;
import io.rainfall.Runner;
import io.rainfall.Scenario;
import io.rainfall.TestException;
import io.rainfall.configuration.ConcurrencyConfig;
import io.rainfall.configuration.ReportingConfig;
import io.rainfall.generator.ByteArrayGenerator;
import io.rainfall.generator.LongGenerator;
import io.rainfall.generator.RandomSequenceGenerator;
import io.rainfall.generator.sequence.Distribution;
import io.rainfall.statistics.StatisticsHolder;
import io.rainfall.statistics.StatisticsPeekHolder;
import io.rainfall.unit.TimeDivision;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.config.units.EntryUnit;
import org.ehcache.config.units.MemoryUnit;
import org.terracotta.sample.dao.SoRDao;
import org.terracotta.sample.dao.SorLoaderWriter;
import org.terracotta.sample.service.CachedDataService;
import org.terracotta.sample.service.DataService;
import org.terracotta.sample.service.UncachedDataService;
import java.net.URI;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static io.rainfall.execution.Executions.during;
import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated;
import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder;
import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder;
import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder;
import static org.ehcache.config.units.MemoryUnit.GB;
/**
* @author Ludovic Orban
*/
public class PerformanceMetricsCollector {

    /** Name under which each timed operation is recorded in the rainfall statistics. */
    public static final String OPERATION_NAME = "load";

    // Single worker thread: at most one load-test scenario runs at any time.
    private final ExecutorService executorService = Executors.newSingleThreadExecutor();

    public enum DaoResult {
        LOAD
    }

    // Non-null while a run is considered active; /api/stats drains it.
    // NOTE(review): only cancel() resets this to null — a run that completes on its own
    // leaves it set, so isRunning() stays true and start() keeps refusing new runs.
    // Confirm whether that is intended.
    private volatile Queue<QueueReporter.Result> resultQueue;
    private CacheManager cacheManager;

    /**
     * Builds the data service (optionally fronted by an Ehcache cache whose heap/offheap/
     * clustered tiers come from {@code config}) and starts the rainfall scenario on the
     * background executor.
     *
     * @param config parsed form entries describing dataset size, tiers and client count
     * @return a Future wrapping the scenario; cancelling it also closes the cache manager
     * @throws RuntimeException when a run is already in progress
     */
    public Future<StatisticsPeekHolder> start(Config config) throws Exception {
        if (resultQueue != null) {
            throw new RuntimeException("Execution is in progress");
        }
        ConcurrencyConfig concurrency = ConcurrencyConfig.concurrencyConfig().threads(config.getClientCount());
        long objectCount = config.getDatasetCount();
        ObjectGenerator<Long> keyGenerator = new LongGenerator();
        ObjectGenerator<byte[]> valueGenerator = ByteArrayGenerator.fixedLengthByteArray((int)config.getValueSizeInBytes());
        DataService<byte[]> dataService;
        final Cache<Long, byte[]> cache;
        if (config.isCacheEnabled()) {
            // Assemble only the resource tiers that were enabled in the UI.
            ResourcePoolsBuilder resourcePoolsBuilder = newResourcePoolsBuilder();
            if (config.getHeapSizeCount() != null) {
                resourcePoolsBuilder = resourcePoolsBuilder.heap(config.getHeapSizeCount(), EntryUnit.ENTRIES);
            }
            if (config.getOffheapSizeCount() != null) {
                resourcePoolsBuilder = resourcePoolsBuilder.offheap(config.getOffheapSizeCount(), MemoryUnit.MB);
            }
            if (config.getTerracottaUrl() != null) {
                resourcePoolsBuilder = resourcePoolsBuilder.with(clusteredDedicated("primary-server-resource", 1, GB));
            }
            // Read-through cache: misses are loaded from the simulated system of record.
            CacheConfigurationBuilder<Long, byte[]> cacheConfigurationBuilder = newCacheConfigurationBuilder(Long.class, byte[].class,
                resourcePoolsBuilder)
                .withLoaderWriter(new SorLoaderWriter(new SoRDao<>(valueGenerator)));
            CacheManagerBuilder cacheManagerBuilder = newCacheManagerBuilder();
            if (config.getTerracottaUrl() != null) {
                cacheManagerBuilder = cacheManagerBuilder.with(ClusteringServiceConfigurationBuilder.cluster(
                    URI.create("terracotta://" + config.getTerracottaUrl() + "/clusterExample"))
                    .autoCreate().build());
            }
            cacheManager = cacheManagerBuilder
                .withCache("cache", cacheConfigurationBuilder.build())
                .build(true);
            cache = cacheManager.getCache("cache", Long.class, byte[].class);
            dataService = new CachedDataService(cache);
        } else {
            dataService = new UncachedDataService<>(valueGenerator);
            cache = null;
        }
        // Gaussian key-access pattern over [0, objectCount); the fourth argument
        // (objectCount / 10) is presumably the distribution width — confirm against rainfall docs.
        RandomSequenceGenerator randomSequenceGenerator = new RandomSequenceGenerator(Distribution.SLOW_GAUSSIAN, 0, objectCount, objectCount / 10);
        resultQueue = new LinkedBlockingQueue<>();
        Callable<StatisticsPeekHolder> callable = () -> Runner.setUp(
            Scenario.scenario("Scaling demo").exec(
                new Operation() {
                    @Override
                    public void exec(StatisticsHolder statisticsHolder, Map<Class<? extends Configuration>, Configuration> map, List<AssertionEvaluator> list) throws TestException {
                        // Time one service lookup and record its latency under OPERATION_NAME.
                        Long key = keyGenerator.generate(randomSequenceGenerator.next());
                        long before = System.nanoTime();
                        byte[] bytes = dataService.loadData(key);
                        long after = System.nanoTime();
                        statisticsHolder.record(OPERATION_NAME, (after - before), DaoResult.LOAD);
                    }
                    @Override
                    public List<String> getDescription() {
                        return Arrays.asList("Service get");
                    }
                }
            ))
            // Effectively "run until cancelled": 24 * 60 minutes = 24 hours.
            .executed(during(24 * 60, TimeDivision.minutes))
            .config(concurrency, ReportingConfig.report(DaoResult.class).log(new QueueReporter(resultQueue, cache)))
            .start();
        Future<StatisticsPeekHolder> future = executorService.submit(callable);
        // Wrap the future so cancellation also tears down the cache manager and clears
        // the running-state marker.
        return new Future<StatisticsPeekHolder>() {
            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                boolean cancel = future.cancel(true);
                if (cacheManager != null) {
                    cacheManager.close();
                }
                resultQueue = null;
                return cancel;
            }
            @Override
            public boolean isCancelled() {
                return future.isCancelled();
            }
            @Override
            public boolean isDone() {
                return future.isDone();
            }
            @Override
            public StatisticsPeekHolder get() throws InterruptedException, ExecutionException {
                return future.get();
            }
            @Override
            public StatisticsPeekHolder get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
                return future.get(timeout, unit);
            }
        };
    }

    /** Removes and returns the next queued stats sample, or null when idle / queue empty. */
    public QueueReporter.Result pollStats() {
        if (resultQueue == null) {
            return null;
        }
        return resultQueue.poll();
    }

    /** A run is considered active while the result queue exists. */
    public boolean isRunning() {
        return resultQueue != null;
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/dao/SoRDao.java | scale-continuum/src/main/java/org/terracotta/sample/dao/SoRDao.java | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.sample.dao;
import io.rainfall.ObjectGenerator;
import java.util.Random;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Simulates a system of record whose response time degrades as load increases:
 * a background timer turns the observed request rate into a shrinking TPS budget,
 * and each load sleeps roughly 1000 / tps milliseconds before generating a value.
 *
 * @author Ludovic Orban
 */
public class SoRDao<T> {

    private final static int INTERVAL_MS = 10;
    private final static int MAX_TPS = 80000;

    // TPS budget for the current interval; recomputed by the timer task, always >= 1.
    private volatile int tps = 1;
    private final AtomicInteger loads = new AtomicInteger();
    private final Timer timer = new Timer(true); // daemon: must not keep the JVM alive
    private final ObjectGenerator<T> generator;
    private final Random random = new Random();

    public SoRDao(ObjectGenerator<T> generator) {
        this.generator = generator;
        // Every INTERVAL_MS: the more loads were issued during the last interval,
        // the lower the TPS budget (and so the higher the simulated latency) becomes.
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                tps = Math.max(MAX_TPS - (loads.getAndSet(0) * 1000 / INTERVAL_MS), 1);
            }
        }, INTERVAL_MS, INTERVAL_MS);
    }

    /**
     * Generates the value for {@code key} after a load-dependent artificial delay.
     * The sleep duration is randomized between roughly 0.9x and 1.1x of the base latency.
     */
    public T loadData(Long key) {
        loads.incrementAndGet();
        // Base latency in ms. tps is clamped to >= 1 by the timer task, so no division
        // by zero is possible; the defensive try/catch that used to wrap this has been removed.
        int latency = Math.max(1000 / tps, 1);
        try {
            int variance = latency / 10; // variance = 0.1
            latency = latency - (variance);
            latency = latency + random.nextInt(2 * Math.max(variance, 1));
            Thread.sleep(latency); // sleep between 0.9 x latency and 1.1 x latency
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return generator.generate(key);
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/dao/SorLoaderWriter.java | scale-continuum/src/main/java/org/terracotta/sample/dao/SorLoaderWriter.java | package org.terracotta.sample.dao;
import org.ehcache.spi.loaderwriter.BulkCacheLoadingException;
import org.ehcache.spi.loaderwriter.BulkCacheWritingException;
import org.ehcache.spi.loaderwriter.CacheLoaderWriter;
import java.util.HashMap;
import java.util.Map;
/**
 * {@link CacheLoaderWriter} that delegates read-through loads to the simulated SoR.
 * Write and delete operations are not supported by this demo.
 *
 * @author Aurelien Broszniowski
 */
public class SorLoaderWriter implements CacheLoaderWriter<Long, byte[]> {

    private final SoRDao<byte[]> soRDao;

    public SorLoaderWriter(final SoRDao<byte[]> soRDao) {
        this.soRDao = soRDao;
    }

    @Override
    public byte[] load(final Long key) throws Exception {
        return soRDao.loadData(key);
    }

    @Override
    public Map<Long, byte[]> loadAll(final Iterable<? extends Long> iterable) throws BulkCacheLoadingException, Exception {
        // Load each key individually; the simulated SoR exposes no bulk API.
        final Map<Long, byte[]> loaded = new HashMap<>();
        iterable.forEach(key -> loaded.put(key, soRDao.loadData(key)));
        return loaded;
    }

    @Override
    public void write(final Long aLong, final byte[] bytes) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    public void writeAll(final Iterable<? extends Map.Entry<? extends Long, ? extends byte[]>> iterable) throws BulkCacheWritingException, Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    public void delete(final Long aLong) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    public void deleteAll(final Iterable<? extends Long> iterable) throws BulkCacheWritingException, Exception {
        throw new UnsupportedOperationException();
    }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/service/DataService.java | scale-continuum/src/main/java/org/terracotta/sample/service/DataService.java | package org.terracotta.sample.service;
/**
 * Abstraction over a key-based data source: implementations may serve values from a
 * cache, from a (simulated) system of record, or any combination of the two.
 * <p>
 * Being a single-abstract-method interface, it can be implemented with a lambda;
 * {@code @FunctionalInterface} documents and enforces that contract.
 *
 * @param <T> the type of the values served
 * @author Aurelien Broszniowski
 */
@FunctionalInterface
public interface DataService<T> {

  /**
   * Loads the value associated with the given key.
   *
   * @param key the key to look up
   * @return the value for {@code key}, or {@code null} if the implementation has none
   */
  T loadData(Long key);
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/service/CachedDataService.java | scale-continuum/src/main/java/org/terracotta/sample/service/CachedDataService.java | package org.terracotta.sample.service;
import org.ehcache.Cache;
/**
 * {@link DataService} that answers lookups straight from an Ehcache {@link Cache};
 * a key absent from the cache therefore yields {@code null}.
 *
 * @author Aurelien Broszniowski
 */
public class CachedDataService implements DataService<byte[]> {

  private final Cache<Long, byte[]> backingCache;

  public CachedDataService(final Cache<Long, byte[]> cache) {
    this.backingCache = cache;
  }

  @Override
  public byte[] loadData(final Long key) {
    return backingCache.get(key);
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/scale-continuum/src/main/java/org/terracotta/sample/service/UncachedDataService.java | scale-continuum/src/main/java/org/terracotta/sample/service/UncachedDataService.java | package org.terracotta.sample.service;
import io.rainfall.ObjectGenerator;
import org.terracotta.sample.dao.SoRDao;
/**
 * {@link DataService} with no cache in front: every call goes straight through to the
 * simulated system of record, paying its full latency each time.
 *
 * @author Aurelien Broszniowski
 */
public class UncachedDataService<T> implements DataService<T> {

  private final SoRDao<T> systemOfRecord;

  public UncachedDataService(final ObjectGenerator<T> valueGenerator) {
    this.systemOfRecord = new SoRDao<>(valueGenerator);
  }

  @Override
  public T loadData(Long key) {
    return systemOfRecord.loadData(key);
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/clustered/src/main/java/org/ehcache/sample/ClusteredXML.java | clustered/src/main/java/org/ehcache/sample/ClusteredXML.java | package org.ehcache.sample;
import java.net.URL;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.config.Configuration;
import org.ehcache.xml.XmlConfiguration;
import org.slf4j.Logger;
import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManager;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Sample: builds a clustered cache manager from the {@code ehcache.xml} classpath
 * resource, reads one entry from {@code basicCache} and shuts the manager down again.
 */
public class ClusteredXML {

  private static final Logger LOGGER = getLogger(ClusteredXML.class);

  public static void main(String[] args) {
    LOGGER.info("Creating clustered cache manager from XML");
    URL myUrl = ClusteredXML.class.getResource("/ehcache.xml");
    // Fail with a clear message instead of an opaque NullPointerException when the
    // configuration resource is missing from the classpath.
    if (myUrl == null) {
      throw new IllegalStateException("Configuration resource /ehcache.xml not found on the classpath");
    }
    Configuration xmlConfig = new XmlConfiguration(myUrl);
    try (CacheManager cacheManager = newCacheManager(xmlConfig)) {
      cacheManager.init();
      Cache<Long, String> basicCache = cacheManager.getCache("basicCache", Long.class, String.class);
      LOGGER.info("Getting from cache");
      String value = basicCache.get(1L);
      LOGGER.info("Retrieved '{}'", value);
      LOGGER.info("Closing cache manager");
    }
    LOGGER.info("Exiting");
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/clustered/src/main/java/org/ehcache/sample/ClusteredProgrammatic.java | clustered/src/main/java/org/ehcache/sample/ClusteredProgrammatic.java | package org.ehcache.sample;
import java.net.URI;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.slf4j.Logger;
import static java.net.URI.create;
import static org.ehcache.clustered.client.config.builders.ClusteredResourcePoolBuilder.clusteredDedicated;
import static org.ehcache.clustered.client.config.builders.ClusteringServiceConfigurationBuilder.cluster;
import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder;
import static org.ehcache.config.builders.CacheManagerBuilder.newCacheManagerBuilder;
import static org.ehcache.config.builders.ResourcePoolsBuilder.heap;
import static org.ehcache.config.units.MemoryUnit.MB;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Sample: programmatically builds a clustered cache manager against a local Terracotta
 * server, puts one entry into {@code basicCache} and closes the manager.
 */
public class ClusteredProgrammatic {

  private static final Logger LOGGER = getLogger(ClusteredProgrammatic.class);

  public static void main(String[] args) {
    LOGGER.info("Creating clustered cache manager");
    // Clustered entity named "clustered" on the default Terracotta port 9410.
    URI uri = create("terracotta://localhost:9410/clustered");
    try (CacheManager cacheManager = newCacheManagerBuilder()
        // Auto-create the cluster entity if absent, backed by "default-resource" on the server.
        .with(cluster(uri).autoCreate().defaultServerResource("default-resource"))
        .withCache("basicCache",
            newCacheConfigurationBuilder(Long.class, String.class,
                // Tiering: 100 on-heap entries, 1 MB off-heap, 5 MB dedicated clustered storage.
                heap(100).offheap(1, MB).with(clusteredDedicated(5, MB))))
        // build(true) also initializes the manager, so no explicit init() call is needed.
        .build(true)) {
      Cache<Long, String> basicCache = cacheManager.getCache("basicCache", Long.class, String.class);
      LOGGER.info("Putting to cache");
      basicCache.put(1L, "da one!");
      LOGGER.info("Closing cache manager");
    }
    LOGGER.info("Exiting");
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/caching-still-matters/src/main/java/SystemOfRecord.java | caching-still-matters/src/main/java/SystemOfRecord.java | import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Singleton simulating a slow, connection-limited system of record: each load holds one
 * of a fixed pool of connections for ten seconds before returning, so concurrent
 * callers quickly exhaust the pool.
 */
public final class SystemOfRecord {

  // Bug fix: the logger was previously created for AsideVsPassthrough.class, so this
  // class's log lines were attributed to the wrong class.
  private static final Logger LOG = LoggerFactory.getLogger(SystemOfRecord.class);

  // Maximum number of simultaneous loads the simulated store will serve.
  private static final int MAX_CONNECTIONS = 5;

  private static final SystemOfRecord systemOfRecord = new SystemOfRecord();

  // Guarded by "this": only accessed from the synchronized acquire/release methods,
  // so the previous volatile modifier was redundant.
  private int activeConnections = 0;

  private final Map<String, String> records = new HashMap<>();

  private SystemOfRecord() {
    records.put("key", "value");
  }

  /**
   * Loads a record, simulating a ten second round trip to the backing store.
   *
   * @param key the record key
   * @return the stored value, {@code null} when absent or when the wait was interrupted
   * @throws IllegalStateException when all connections are busy
   */
  public String load(String key) {
    LOG.warn("Someone is accessing the slow SoR to load : {}", key);
    if (!acquireConnection()) {
      throw new IllegalStateException("No connection available");
    }
    try {
      TimeUnit.SECONDS.sleep(10);
      return records.get(key);
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers can still observe the interruption.
      Thread.currentThread().interrupt();
      return null;
    } finally {
      releaseConnection();
    }
  }

  /** Tries to take a connection from the pool; returns false when the pool is exhausted. */
  synchronized boolean acquireConnection() {
    if (activeConnections >= MAX_CONNECTIONS) {
      LOG.error("No connections available");
      return false;
    }
    activeConnections++;
    return true;
  }

  /** Returns a connection to the pool. */
  synchronized void releaseConnection() {
    activeConnections--;
  }

  public static SystemOfRecord getSystemOfRecord() {
    return systemOfRecord;
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/caching-still-matters/src/main/java/CacheUsage.java | caching-still-matters/src/main/java/CacheUsage.java | import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.jsr107.Eh107Configuration;
import org.ehcache.jsr107.EhcacheCachingProvider;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
/**
 * Shows different caches. Some are correctly used, some require attention. See the
 * metrics to figure it out: each cache below exhibits a distinct usage pattern
 * (unused, always-missing, mostly-missing, evicting, healthy, write-only).
 */
public class CacheUsage {

  public static void main(String[] args) throws Exception {
    MutableConfiguration<Integer, String> configuration = new MutableConfiguration<>();
    // Expose JSR-107 statistics so the patterns are visible in monitoring tooling.
    configuration.setStatisticsEnabled(true);

    EhcacheCachingProvider provider = getCachingProvider();
    try(CacheManager cacheManager = provider.getCacheManager()) {
      // Just do not use it: populated once, never read again.
      Cache<Integer, String> unused = cacheManager.createCache("unused", configuration);
      IntStream.range(0, 1000).forEach(i -> unused.put(i, "" + i));

      // Hit the empty cache: every get is a miss because nothing was ever put.
      Cache<Integer, String> alwaysmiss = cacheManager.createCache("alwaysmiss", configuration);
      IntStream.range(0, 1000).forEach(i -> alwaysmiss.get(i));

      // Put some stuff but misses most of the time: only a quarter of the keys exist.
      Cache<Integer, String> mostlymiss = cacheManager.createCache("mostlymiss", configuration);
      IntStream.range(0, 250).forEach(i -> mostlymiss.put(i, "" + i));
      IntStream.range(0, 1000).forEach(i -> mostlymiss.get(i));

      // Cause evictions: heap capped at 1000 entries while ten times more are written.
      CacheConfiguration<Integer, String> cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Integer.class, String.class,
        ResourcePoolsBuilder.heap(1000)).build();
      Cache<Integer, String> evicts = cacheManager.createCache("evicts", Eh107Configuration.fromEhcacheCacheConfiguration(cacheConfiguration));
      IntStream.range(0, 10_000).forEach(i -> {
        evicts.put(i, "" + i);
        if(i % 2 == 0) {
          evicts.get(i);
        }
      });

      // Nicely working cache: almost every read hits.
      Cache<Integer, String> works = cacheManager.createCache("works", configuration);
      IntStream.range(0, 950).forEach(i -> works.put(i, "" + i));
      IntStream.range(0, 1000).forEach(i -> works.get(i));

      // Heavily written, read exactly once: the stored data is mostly wasted.
      Cache<Integer, String> notthatused = cacheManager.createCache("notthatused", configuration);
      IntStream.range(0, 1000).forEach(i -> notthatused.put(i, "" + i));
      notthatused.get(0);

      // Keep the JVM alive so the statistics can be inspected via monitoring.
      TimeUnit.MINUTES.sleep(10L);
    }
  }

  /** Unwraps the JSR-107 provider to Ehcache's concrete implementation. */
  private static EhcacheCachingProvider getCachingProvider() {
    return (EhcacheCachingProvider) Caching.getCachingProvider();
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/caching-still-matters/src/main/java/JvmCaching.java | caching-still-matters/src/main/java/JvmCaching.java |
/**
 * Demonstrate that the JDK is caching the boxed form of the most frequently used integers.
 * <p>
 * NOTE(review): the {@code ==} comparison on boxed Integers below is deliberate — the
 * demo relies on {@link Integer#valueOf(int)} returning cached instances for small
 * values (the Javadoc guarantees caching at least for -128..127), so identity
 * comparison succeeds for 100.
 */
public class JvmCaching {

  public static void main(String[] args) {
    // Intentional reference comparison: both calls resolve to the same cached instance.
    if(Integer.valueOf(100) == Integer.valueOf(100)) {
      System.out.println("Same instance");
    } else {
      System.err.println("Not the same!");
    }

    // Uncomment to probe the cache boundaries around the documented -128..127 range.
//    for (int i = -140; i < 150 ; i++) {
//      boolean cached = Integer.valueOf(i) == Integer.valueOf(i);
//      System.out.printf("Boxed %-4s cached? %s%n", i, (cached ? "YES" : "No :-("));
//    }
  }
}
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/caching-still-matters/src/main/java/AsideVsPassthrough.java | caching-still-matters/src/main/java/AsideVsPassthrough.java | import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.builders.ExpiryPolicyBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;
/**
 * Demo contrasting cache-aside and cache-through access to a slow, connection-limited
 * system of record. Ten threads concurrently look up the same key; swap the commented
 * lines in {@code main} to switch between the two strategies.
 */
public class AsideVsPassthrough implements AutoCloseable {

  public static void main(String[] args) throws InterruptedException {
    try(AsideVsPassthrough app = new AsideVsPassthrough()) {
      // The SoR must exist before initCacheThrough(), whose loader-writer captures it.
      app.initSystemOfRecords();
      app.initCacheAside();
//      app.initCacheThrough();

      ExecutorService executor = Executors.newFixedThreadPool(10);
      IntStream.range(0, 10)
        .forEach(iteration -> executor.submit(() -> app.retrieveAndDisplayTheValueAside()));
//        .forEach(iteration -> executor.submit(() -> app.retrieveAndDisplayTheValueThrough()));

      executor.shutdown();
      // Bounded wait: each SoR load takes ~10s, so 20s lets at least some tasks finish.
      executor.awaitTermination(20, TimeUnit.SECONDS);
    }
  }

  private static final Logger LOG = LoggerFactory.getLogger(AsideVsPassthrough.class);

  // Created by one of the initCache*() methods; released in close().
  private CacheManager cacheManager;
  // Shared, connection-limited backing store; set by initSystemOfRecords().
  private SystemOfRecord systemOfRecord;

  /** Builds a plain heap cache (100 entries, 60s TTL) with no loader-writer attached. */
  private void initCacheAside() {
    LOG.info("Init Cache");
    CacheConfiguration<String, String> cacheConfiguration = CacheConfigurationBuilder
      .newCacheConfigurationBuilder(String.class, String.class, ResourcePoolsBuilder
        .heap(100))
      .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(60)))
      .build();

    cacheManager = CacheManagerBuilder.newCacheManagerBuilder()
      .withCache("myCache", cacheConfiguration)
      .build();
    // build() does not initialize the manager, hence the explicit init() here.
    cacheManager.init();
  }

  /** Builds the same cache but wired to the SoR through a {@link SorLoaderWriter}. */
  private void initCacheThrough() {
    LOG.info("Init Cache");
    CacheConfigurationBuilder<String, String> cacheConfiguration = CacheConfigurationBuilder
      .newCacheConfigurationBuilder(String.class, String.class, ResourcePoolsBuilder
        .heap(100))
      .withExpiry(ExpiryPolicyBuilder.timeToLiveExpiration(Duration.ofSeconds(60)))
      .withLoaderWriter(new SorLoaderWriter(systemOfRecord));

    // build(true) initializes the manager immediately, unlike initCacheAside().
    cacheManager = CacheManagerBuilder.newCacheManagerBuilder()
      .withCache("myCache", cacheConfiguration)
      .build(true);
  }

  /**
   * Cache-aside read: consult the cache, and on a miss fetch from the SoR explicitly
   * and populate the cache. Concurrent misses may exhaust the SoR's connection pool,
   * which is handled by logging and abandoning that lookup.
   */
  private void retrieveAndDisplayTheValueAside() {
    Cache<String, String> myCache = cacheManager.getCache("myCache", String.class, String.class);

    String value = myCache.get("key");
    if (value == null) {
      try {
        value = systemOfRecord.load("key");
      } catch(IllegalStateException e) {
        // All SoR connections are busy; give up on this lookup.
        LOG.error("Fail to access SoR");
        return;
      }
      myCache.put("key", value);
    }
    LOG.info("We could retrieve the value : " + value);
  }

  /** Cache-through read: a plain get; the loader-writer transparently fills misses. */
  private void retrieveAndDisplayTheValueThrough() {
    Cache<String, String> myCache = cacheManager.getCache("myCache", String.class, String.class);
    String value = myCache.get("key");
    LOG.info("We could retrieve the value : " + value);
  }

  /** Obtains the shared system-of-record singleton. */
  private void initSystemOfRecords() {
    LOG.info("Init SystemOfRecords");
    systemOfRecord = SystemOfRecord.getSystemOfRecord();
  }

  /** Releases the cache manager built by one of the initCache*() methods. */
  @Override
  public void close() {
    cacheManager.close();
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/caching-still-matters/src/main/java/SorLoaderWriter.java | caching-still-matters/src/main/java/SorLoaderWriter.java | import org.ehcache.spi.loaderwriter.CacheLoaderWriter;
/**
 * Read-through adapter: delegates cache misses to the {@link SystemOfRecord}.
 * <p>
 * Writes and deletes are intentional no-ops — the SoR in this demo is read-only and
 * the cache-through scenario only performs gets.
 */
public class SorLoaderWriter implements CacheLoaderWriter<String, String> {

  private final SystemOfRecord systemOfRecord;

  public SorLoaderWriter(SystemOfRecord systemOfRecord) {
    this.systemOfRecord = systemOfRecord;
  }

  /** Called by Ehcache on a cache miss; may block ~10s and can run out of connections. */
  @Override
  public String load(String key) {
    return systemOfRecord.load(key);
  }

  /** Intentional no-op: the demo's system of record is not writable. */
  @Override
  public void write(String key, String value) {
  }

  /** Intentional no-op: the demo's system of record is not writable. */
  @Override
  public void delete(String key) {
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/jsr107/src/main/java/org/ehcache/sample/CreateExtendedJCache.java | jsr107/src/main/java/org/ehcache/sample/CreateExtendedJCache.java | package org.ehcache.sample;
import org.slf4j.Logger;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.expiry.CreatedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.spi.CachingProvider;
import java.util.concurrent.TimeUnit;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Demonstrates "extended" JSR-107 usage: the JCache {@link CacheManager} is created
 * from an Ehcache XML file so that programmatically created caches pick up the Ehcache
 * cache-templates (default template, by-name matches) declared there.
 */
public class CreateExtendedJCache extends BaseJCacheTester {
  private static final Logger LOGGER = getLogger(CreateExtendedJCache.class);

  /** Generator shared by every run: Long keys, "Da One N!!" String values. */
  private static final KeyValueGenerator<Long, String> GENERATOR = new KeyValueGenerator<Long, String>() {
    @Override
    public Long getKey(Number k) {
      // Long.valueOf uses the JDK's Long cache; the new Long(...) constructor is deprecated.
      return Long.valueOf(k.longValue());
    }

    @Override
    public String getValue(Number v) {
      return String.format("Da One %s!!", v.toString());
    }
  };

  /**
   * @param args optional: [0] objects per iteration (default 5000), [1] iterations
   *             (default 5), [2] sleep millis between iterations (default 1000)
   */
  public static void main(String[] args) throws Exception {
    // Each argument falls back to its default individually; the previous code threw
    // ArrayIndexOutOfBoundsException when only some of the arguments were supplied.
    int numberOfObjects = Integer.parseInt(args.length > 0 ? args[0] : "5000");
    int numberOfIteration = Integer.parseInt(args.length > 1 ? args[1] : "5");
    int sleepTimeMillisBetweenIterations = Integer.parseInt(args.length > 2 ? args[2] : "1000");

    new CreateExtendedJCache().run(numberOfIteration, numberOfObjects, sleepTimeMillisBetweenIterations);
    LOGGER.info("Exiting");
  }

  /**
   * Creates three caches — one picking up the default template, two matched by name —
   * and exercises each with the shared get-or-put test.
   */
  public void run(int numberOfIteration, int numberOfObjectPerIteration, int sleepTimeMillisBetweenIterations) throws Exception {
    LOGGER.info("Extended JCache testing BEGIN - Creating extended JCache programmatically using ehcache templates.");

    //finds ehcache provider automatically if it is in the classpath
    CachingProvider cachingProvider = Caching.getCachingProvider();

    // If there are multiple providers in your classpath, use the fully qualified name to retrieve the Ehcache caching provider.
    //CachingProvider cachingProvider = Caching.getCachingProvider("org.ehcache.jsr107.EhcacheCachingProvider");

    // Load the cache manager from the XML to extend JSR107 using the cache-templates declared there.
    String xmlClassPath = System.getProperty("jsr107.config.classpath", "ehcache-jsr107-extendedWithTemplates.xml");
    try (CacheManager cacheManager = cachingProvider.getCacheManager(
      Thread.currentThread().getContextClassLoader().getResource(xmlClassPath).toURI(),
      Thread.currentThread().getContextClassLoader())) {

      LOGGER.info("----------------------------------------------------------------");
      String cacheName = "myJCache";
      //this new cache should use the default maxEntriesOnHeapCache template defined in the config
      LOGGER.info("Cache testing with new cache name {} - " +
        "The new cache should use the default 'maxEntriesOnHeapCache' template defined in the config.", cacheName);
      Cache<Long, String> myJCache = createExpiringCache(cacheManager, cacheName);
      simpleGetsAndPutsCacheTest(myJCache, numberOfIteration, numberOfObjectPerIteration, sleepTimeMillisBetweenIterations, GENERATOR);

      LOGGER.info("----------------------------------------------------------------");
      cacheName = "byRefCache";
      LOGGER.info("Cache testing with cache name {} - " +
        "The new cache should use the jsr107 default matched by name '{}' defined in the config.", cacheName, cacheName);
      Cache<Long, String> myJCache2 = createExpiringCache(cacheManager, cacheName);
      // Bug fix: this run previously exercised myJCache again instead of myJCache2.
      simpleGetsAndPutsCacheTest(myJCache2, numberOfIteration, numberOfObjectPerIteration, sleepTimeMillisBetweenIterations, GENERATOR);

      LOGGER.info("----------------------------------------------------------------");
      cacheName = "byValueTemplate";
      LOGGER.info("Cache testing with cache name {} - " +
        "The new cache should use the jsr107 default matched by name '{}' defined in the config.", cacheName, cacheName);
      Cache<Long, String> myJCache3 = createExpiringCache(cacheManager, cacheName);
      // Bug fix: this run previously exercised myJCache again instead of myJCache3.
      simpleGetsAndPutsCacheTest(myJCache3, numberOfIteration, numberOfObjectPerIteration, sleepTimeMillisBetweenIterations, GENERATOR);
    }

    LOGGER.info("Extended JCache testing DONE - Creating extended JCache programmatically using ehcache templates.");
  }

  /** Creates a by-reference, statistics-enabled cache with a 5 second created-expiry. */
  private static Cache<Long, String> createExpiringCache(CacheManager cacheManager, String cacheName) {
    return cacheManager.createCache(
      cacheName,
      new MutableConfiguration<Long, String>()
        .setTypes(Long.class, String.class)
        .setStoreByValue(false)
        .setStatisticsEnabled(true)
        .setExpiryPolicyFactory(FactoryBuilder.factoryOf(new CreatedExpiryPolicy(new Duration(TimeUnit.SECONDS, 5)))));
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/jsr107/src/main/java/org/ehcache/sample/BaseJCacheTester.java | jsr107/src/main/java/org/ehcache/sample/BaseJCacheTester.java | package org.ehcache.sample;
import org.ehcache.config.CacheRuntimeConfiguration;
import org.ehcache.config.ResourceType;
import org.ehcache.jsr107.Eh107Configuration;
import org.slf4j.Logger;
import javax.cache.Cache;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Base class for the JSR-107 samples: provides the get-or-put cache exercise and the
 * configuration inspection shared by the Create*JCache entry points.
 */
public abstract class BaseJCacheTester {
  private static final Logger LOGGER = getLogger(BaseJCacheTester.class);

  /**
   * Repeatedly walks keys 0..numberOfObjectPerIteration-1: on a miss the entry is put,
   * on a hit it is counted. With a short expiry configured on the cache, successive
   * iterations show entries flipping back to misses. Timing and hit/miss counts are
   * logged per iteration.
   *
   * @param myJCache the cache under test; when {@code null} the method logs an error and returns false
   * @param numberOfIteration how many passes over the key range to perform
   * @param numberOfObjectPerIteration number of keys visited per pass
   * @param sleepTimeMillisBetweenIterations pause between passes (0 or less means no pause)
   * @param keyValueGenerator maps the loop counter to typed keys and values
   * @return {@code true} when the cache was exercised, {@code false} when it was null
   * @throws InterruptedException if interrupted while sleeping between iterations
   */
  protected <K, V> boolean simpleGetsAndPutsCacheTest(Cache<K, V> myJCache, int numberOfIteration, int numberOfObjectPerIteration, int sleepTimeMillisBetweenIterations, KeyValueGenerator<K, V> keyValueGenerator) throws InterruptedException {
    try {
      LOGGER.info("simpleGetsAndPutsCacheTest BEGIN");
      if (null != myJCache) {
        //config check to log
        inspectCacheConfig(myJCache);

        int iterationCount = 0;
        while (iterationCount < numberOfIteration) {
          LOGGER.info("simpleGetsAndPutsCacheTest -------------------------------------------------------------");
          LOGGER.info("simpleGetsAndPutsCacheTest - Iteration #{} of {}", iterationCount + 1, numberOfIteration);

          int hitCounts = 0, missCount = 0;
          long startTime = System.nanoTime();

          //iterate through numberOfObjects and use the iterator as the key, value does not matter at this time
          int opsCount = 0;
          for (opsCount = 0; opsCount < numberOfObjectPerIteration; opsCount++) {
            K key = keyValueGenerator.getKey(opsCount);

            V value;
            // get-or-put: a single get doubles as the membership check.
            if (null == (value = myJCache.get(key))) {
              missCount++;
              LOGGER.debug("Key {} NOT in cache. Putting it...", key);
              myJCache.put(key, keyValueGenerator.getValue(opsCount));
            } else {
              hitCounts++;
              LOGGER.debug("Key {} IS in cache. Value = {}", key, value);
            }
          }
          long duration = System.nanoTime() - startTime;

          LOGGER.info("simpleGetsAndPutsCacheTest - Done Iteration #{} of {} in {} micros - total cache ops: {} hits: {} / misses: {}", iterationCount + 1, numberOfIteration, (duration / 1000), opsCount, hitCounts, missCount);

          iterationCount++;
          if (sleepTimeMillisBetweenIterations > 0) {
            LOGGER.info("Sleeping for {} millis...", sleepTimeMillisBetweenIterations);
            Thread.sleep(sleepTimeMillisBetweenIterations);
          }
        }
        LOGGER.info("simpleGetsAndPutsCacheTest - Successfully executed {} iteration.", iterationCount);
        return true;
      } else {
        LOGGER.error("simpleGetsAndPutsCacheTest - Cache object is null...doing nothing...");
        return false;
      }
    } finally {
      LOGGER.info("simpleGetsAndPutsCacheTest DONE");
    }
  }

  /**
   * Unwraps the JSR-107 configuration to Ehcache's runtime configuration and logs the
   * configured on-heap capacity of the cache.
   */
  private <K, V> void inspectCacheConfig(Cache<K, V> myJCache) {
    //get the configuration to print the size on heap
    CacheRuntimeConfiguration<K, V> ehcacheConfig = (CacheRuntimeConfiguration<K, V>) myJCache
      .getConfiguration(Eh107Configuration.class)
      .unwrap(CacheRuntimeConfiguration.class);
    long heapSize = ehcacheConfig.getResourcePools().getPoolForResource(ResourceType.Core.HEAP).getSize();

    LOGGER.info(ehcacheConfig.toString());
    LOGGER.info("Cache testing - Cache {} with heap capacity = {}", myJCache.getName(), heapSize);
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/jsr107/src/main/java/org/ehcache/sample/CreateBasicJCacheProgrammatic.java | jsr107/src/main/java/org/ehcache/sample/CreateBasicJCacheProgrammatic.java | package org.ehcache.sample;
import org.slf4j.Logger;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.expiry.CreatedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.spi.CachingProvider;
import java.util.concurrent.TimeUnit;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Creates a JCache purely programmatically (no XML configuration) and exercises it
 * with the shared get-or-put test.
 */
public class CreateBasicJCacheProgrammatic extends BaseJCacheTester {
  private static final Logger LOGGER = getLogger(CreateBasicJCacheProgrammatic.class);

  // Now final: this is a constant and was previously a mutable static field.
  private static final String CACHE_NAME = "myJCache";

  /**
   * @param args optional: [0] objects per iteration (default 5000), [1] iterations
   *             (default 5), [2] sleep millis between iterations (default 1000)
   */
  public static void main(String[] args) throws Exception {
    // Per-argument defaults; the previous code threw ArrayIndexOutOfBoundsException
    // when only some of the arguments were supplied.
    int numberOfObjects = Integer.parseInt(args.length > 0 ? args[0] : "5000");
    int numberOfIteration = Integer.parseInt(args.length > 1 ? args[1] : "5");
    int sleepTimeMillisBetweenIterations = Integer.parseInt(args.length > 2 ? args[2] : "1000");

    new CreateBasicJCacheProgrammatic().run(numberOfIteration, numberOfObjects, sleepTimeMillisBetweenIterations);
    LOGGER.info("Exiting");
  }

  public void run(int numberOfIteration, int numberOfObjectPerIteration, int sleepTimeMillisBetweenIterations) throws Exception {
    LOGGER.info("JCache testing BEGIN - Creating JCache Programmatically without any XML config");

    //finds ehcache provider automatically if it is in the classpath
    CachingProvider cachingProvider = Caching.getCachingProvider();

    // If there are multiple providers in your classpath, use the fully qualified name to retrieve the Ehcache caching provider.
    //CachingProvider cachingProvider = Caching.getCachingProvider("org.ehcache.jsr107.EhcacheCachingProvider");

    try (CacheManager cacheManager = cachingProvider.getCacheManager()) {
      // By-reference, statistics-enabled cache with a 5 second created-expiry.
      Cache<Long, String> myJCache = cacheManager.createCache(
        CACHE_NAME,
        new MutableConfiguration<Long, String>()
          .setTypes(Long.class, String.class)
          .setStoreByValue(false)
          .setStatisticsEnabled(true)
          .setExpiryPolicyFactory(FactoryBuilder.factoryOf(new CreatedExpiryPolicy(new Duration(TimeUnit.SECONDS, 5)))));

      simpleGetsAndPutsCacheTest(myJCache, numberOfIteration, numberOfObjectPerIteration, sleepTimeMillisBetweenIterations, new KeyValueGenerator<Long, String>() {
        @Override
        public Long getKey(Number k) {
          // Long.valueOf uses the JDK's Long cache; new Long(...) is deprecated.
          return Long.valueOf(k.longValue());
        }

        @Override
        public String getValue(Number v) {
          return String.format("Da One %s!!", v.toString());
        }
      });
    }

    LOGGER.info("JCache testing DONE - Creating JCache Programmatically without any XML config");
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/jsr107/src/main/java/org/ehcache/sample/CreateBasicJCacheWithXML.java | jsr107/src/main/java/org/ehcache/sample/CreateBasicJCacheWithXML.java | package org.ehcache.sample;
import org.slf4j.Logger;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.spi.CachingProvider;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Creates a JCache manager from a JSR-107 XML resource and exercises every declared
 * cache with the shared get-or-put test.
 */
public class CreateBasicJCacheWithXML extends BaseJCacheTester {
  private static final Logger LOGGER = getLogger(CreateBasicJCacheWithXML.class);

  /**
   * @param args optional: [0] objects per iteration (default 5000), [1] iterations
   *             (default 5), [2] sleep millis between iterations (default 1000)
   */
  public static void main(String[] args) throws Exception {
    // Per-argument defaults; the previous code threw ArrayIndexOutOfBoundsException
    // when only some of the arguments were supplied.
    int numberOfObjects = Integer.parseInt(args.length > 0 ? args[0] : "5000");
    int numberOfIteration = Integer.parseInt(args.length > 1 ? args[1] : "5");
    int sleepTimeMillisBetweenIterations = Integer.parseInt(args.length > 2 ? args[2] : "1000");

    new CreateBasicJCacheWithXML().run(numberOfIteration, numberOfObjects, sleepTimeMillisBetweenIterations);
    LOGGER.info("Exiting");
  }

  public void run(int numberOfIteration, int numberOfObjectPerIteration, int sleepTimeMillisBetweenIterations) throws Exception {
    LOGGER.info("JCache testing BEGIN - Creating cache manager via XML resource");

    String xmlClassPath = System.getProperty("jsr107.config.classpath", "ehcache-jsr107-simple.xml");
    CachingProvider cachingProvider = Caching.getCachingProvider();

    // Fail with a clear message instead of an opaque NullPointerException when the
    // configuration resource is missing from the classpath.
    java.net.URL configUrl = Thread.currentThread().getContextClassLoader().getResource(xmlClassPath);
    if (configUrl == null) {
      throw new IllegalStateException("JCache configuration resource not found on classpath: " + xmlClassPath);
    }

    try (CacheManager cacheManager = cachingProvider.getCacheManager(
      configUrl.toURI(),
      Thread.currentThread().getContextClassLoader())) {

      //go over all the caches
      for (String cacheName : cacheManager.getCacheNames()) {
        LOGGER.info("----------------------------------------------------------------");
        LOGGER.info("Cache testing with cache name {}", cacheName);

        Cache<Long, String> myJCache = cacheManager.getCache(cacheName, Long.class, String.class);
        simpleGetsAndPutsCacheTest(myJCache, numberOfIteration, numberOfObjectPerIteration, sleepTimeMillisBetweenIterations, new KeyValueGenerator<Long, String>() {
          @Override
          public Long getKey(Number k) {
            // Long.valueOf uses the JDK's Long cache; new Long(...) is deprecated.
            return Long.valueOf(k.longValue());
          }

          @Override
          public String getValue(Number v) {
            return String.format("Da One %s!!", v.toString());
          }
        });
      }
    }

    LOGGER.info("JCache testing DONE - Creating cache manager via XML resource");
  }
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
ehcache/ehcache3-samples | https://github.com/ehcache/ehcache3-samples/blob/51678052dbe66701eda3b7aff483252a94345ce5/jsr107/src/main/java/org/ehcache/sample/KeyValueGenerator.java | jsr107/src/main/java/org/ehcache/sample/KeyValueGenerator.java | package org.ehcache.sample;
/**
 * Maps a numeric loop counter to a typed cache key and value, letting the shared test
 * harness in {@code BaseJCacheTester} stay generic over key and value types.
 *
 * @param <K> key type produced
 * @param <V> value type produced
 */
public interface KeyValueGenerator<K, V> {

  /** Derives the cache key for the given counter value. */
  K getKey(Number k);

  /** Derives the cache value for the given counter value. */
  V getValue(Number v);
}
| java | Apache-2.0 | 51678052dbe66701eda3b7aff483252a94345ce5 | 2026-01-05T02:40:54.100300Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/util/ExamplesTestData.java | src/test/java/org/apache/flink/graph/streaming/util/ExamplesTestData.java | /* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
/**
 * Canned input streams and expected outputs shared by the streaming example tests.
 * Edge lines are encoded as "source target value"; degree-event lines carry a
 * trailing '+' or '-' marker instead of a value.
 * NOTE(review): '+'/'-' appear to mark edge additions/removals respectively
 * (see DEGREES_DATA vs DEGREES_RESULT) — confirm against the degree example.
 */
public class ExamplesTestData {
// Triangle example input: weighted edge list over vertices 1..11.
public static final String TRIANGLES_DATA =
"1 2 100\n" + "1 3 150\n"
+ "3 2 200\n" + "2 4 250\n"
+ "3 4 300\n" + "3 5 350\n"
+ "4 5 400\n" + "4 6 450\n"
+ "6 5 500\n" + "5 7 550\n"
+ "6 7 600\n" + "8 6 650\n"
+ "7 8 700\n" + "7 9 750\n"
+ "8 9 800\n" + "10 8 850\n"
+ "9 10 900\n" + "9 11 950\n"
+ "10 11 1000";
// Expected triangle-example output for TRIANGLES_DATA.
public static final String TRIANGLES_RESULT =
"(2,1199)\n(2,399)\n(3,799)\n";
// Degree example input: edge add (+) / remove (-) events.
public static final String DEGREES_DATA =
"1 2 +\n" + "2 3 +\n" + "1 4 +\n"
+ "2 3 -\n" + "3 4 +\n" + "1 2 -";
// Expected per-event degree output for DEGREES_DATA.
public static final String DEGREES_RESULT =
"(1,1)\n(1,2)\n" +
"(2,1)\n(1,1)\n(1,2)\n" +
"(2,2)\n(1,1)\n(1,2)\n" +
"(1,3)\n(2,1)\n(1,2)\n"+
"(1,3)\n(2,2)\n(1,2)\n" +
"(1,3)\n(2,1)\n(1,2)";
// Variant of DEGREES_DATA whose final event drops a vertex's degree to zero.
public static final String DEGREES_DATA_ZERO =
"1 2 +\n" + "2 3 +\n" + "1 4 +\n" +
"2 3 -\n" + "3 4 +\n" + "1 2 -\n" +
"2 3 -";
// Expected per-event degree output for DEGREES_DATA_ZERO.
public static final String DEGREES_RESULT_ZERO =
"(1,1)\n(1,2)\n" +
"(2,1)\n(1,1)\n(1,2)\n" +
"(2,2)\n(1,1)\n(1,2)\n" +
"(1,3)\n(2,1)\n(1,2)\n"+
"(1,3)\n(2,2)\n(1,2)\n" +
"(1,3)\n(2,1)\n(1,2)\n" +
"(1,1)";
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/util/AdjacencyListGraphTest.java | src/test/java/org/apache/flink/graph/streaming/util/AdjacencyListGraphTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
import org.apache.flink.graph.streaming.summaries.AdjacencyListGraph;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class AdjacencyListGraphTest {
private AdjacencyListGraph<Integer> g = new AdjacencyListGraph<>();
/**
 * addEdge must register both endpoints symmetrically, and neither a reversed
 * duplicate (3-1 after 1-3) nor an exact duplicate (1-2 twice) may grow the
 * adjacency lists.
 */
@Test
public void testAddEdge() throws Exception {
    // First edge: both endpoints created and linked to each other.
    g.addEdge(1, 2);
    assertEquals(2, g.getAdjacencyMap().size());
    assertTrue(g.getAdjacencyMap().get(1).contains(2));
    assertTrue(g.getAdjacencyMap().get(2).contains(1));
    assertEquals(1, g.getAdjacencyMap().get(1).size());
    assertEquals(1, g.getAdjacencyMap().get(2).size());
    // Second neighbour for an existing vertex adds exactly one new vertex entry.
    g.addEdge(1, 3);
    assertEquals(3, g.getAdjacencyMap().size());
    assertTrue(g.getAdjacencyMap().get(1).contains(2));
    assertTrue(g.getAdjacencyMap().get(1).contains(3));
    assertTrue(g.getAdjacencyMap().get(3).contains(1));
    // The graph is undirected: 3-1 duplicates 1-3 and must be a no-op.
    g.addEdge(3, 1);
    assertEquals(3, g.getAdjacencyMap().size());
    assertEquals(2, g.getAdjacencyMap().get(1).size());
    assertEquals(1, g.getAdjacencyMap().get(3).size());
    // Re-adding an existing edge must also be a no-op.
    g.addEdge(1, 2);
    assertEquals(3, g.getAdjacencyMap().size());
    assertEquals(2, g.getAdjacencyMap().get(1).size());
    assertEquals(1, g.getAdjacencyMap().get(2).size());
}
/**
 * Exercises boundedBFS on a growing graph. Based on the assertions here,
 * boundedBFS(src, trg, depth) returns true iff trg is reachable from src
 * within the given hop bound; the caller adds the candidate edge only when
 * the endpoints are NOT already close (result false).
 */
@Test
public void testBoundedBFS() throws Exception {
    // Fresh graph: chain 1-4-5-6 with branch 4-7-8.
    g.reset();
    g.addEdge(1, 4);
    g.addEdge(4, 5);
    g.addEdge(5, 6);
    g.addEdge(4, 7);
    g.addEdge(7, 8);
    // 2 and 3 are disconnected -> edge 2-3 should be added.
    assertFalse(g.boundedBFS(2, 3, 3));
    g.addEdge(2, 3);
    // 3 and 4 are still disconnected -> edge 3-4 should be added.
    assertFalse(g.boundedBFS(3, 4, 3));
    g.addEdge(3, 4);
    // 3-4-5-6 is within 3 hops -> edge 3-6 should be dropped.
    assertTrue(g.boundedBFS(3, 6, 3));
    // 9 is a new vertex -> edge 8-9 should be added.
    assertFalse(g.boundedBFS(8, 9, 3));
    g.addEdge(8, 9);
    // 8 to 6 needs more than 3 hops (8-7-4-5-6) -> edge 8-6 should be added.
    assertFalse(g.boundedBFS(8, 6, 3));
    g.addEdge(8, 6);
    // 5-6-8-9 is within 3 hops -> edge 5-9 should be dropped.
    assertTrue(g.boundedBFS(5, 9, 3));
}
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/util/DisjointSetTest.java | src/test/java/org/apache/flink/graph/streaming/util/DisjointSetTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.util;
import org.apache.flink.graph.streaming.summaries.DisjointSet;
import org.junit.Before;
import org.junit.Test;
import java.util.HashSet;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
public class DisjointSetTest {
private DisjointSet<Integer> ds = new DisjointSet<>();
@Before
public void setup() {
for (int i = 0; i < 8; i++) {
ds.union(i, i + 2);
}
}
@Test
public void testGetMatches() throws Exception {
assertEquals(ds.getMatches().size(), 10);
}
@Test
public void testFind() throws Exception {
Integer root1 = ds.find(0);
Integer root2 = ds.find(1);
assertNotEquals(root1, root2);
for (int i = 0; i < 10; i++) {
assertEquals((i % 2) == 0 ? root1 : root2, ds.find(i));
}
}
@Test
public void testMerge() throws Exception {
DisjointSet<Integer> ds2 = new DisjointSet<>();
for (int i = 0; i < 8; i++) {
ds2.union(i, i + 100);
}
ds2.merge(ds);
assertEquals(18, ds2.getMatches().size());
Set<Integer> treeRoots = new HashSet<>();
for (int element : ds2.getMatches().keySet()) {
treeRoots.add(ds2.find(element));
}
assertEquals(2, treeRoots.size());
}
} | java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/GraphStreamTestUtils.java | src/test/java/org/apache/flink/graph/streaming/test/GraphStreamTestUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Vertex;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import java.util.ArrayList;
import java.util.List;
/**
 * Shared sample vertex and edge data for the graph-streaming tests.
 * All members are static; the class is never instantiated.
 */
public class GraphStreamTestUtils {

    private GraphStreamTestUtils() {
        // Utility class: static factories only, no instances.
    }

    /**
     * @return a stream of the five sample vertices (id 1..5, value equal to the id).
     */
    // NOTE: 'final' dropped from these static methods — static methods cannot be
    // overridden, so the modifier was redundant.
    public static DataStream<Vertex<Long, Long>> getLongLongVertexDataStream(StreamExecutionEnvironment env) {
        return env.fromCollection(getLongLongVertices());
    }

    /**
     * @return a stream of the seven sample edges; each value is the concatenation
     * of source and target ids (e.g. 12 for the edge 1 -> 2).
     */
    public static DataStream<Edge<Long, Long>> getLongLongEdgeDataStream(StreamExecutionEnvironment env) {
        return env.fromCollection(getLongLongEdges());
    }

    /**
     * @return a List of sample Vertex data.
     */
    private static List<Vertex<Long, Long>> getLongLongVertices() {
        List<Vertex<Long, Long>> vertices = new ArrayList<>(5);
        vertices.add(new Vertex<>(1L, 1L));
        vertices.add(new Vertex<>(2L, 2L));
        vertices.add(new Vertex<>(3L, 3L));
        vertices.add(new Vertex<>(4L, 4L));
        vertices.add(new Vertex<>(5L, 5L));
        return vertices;
    }

    /**
     * @return a List of sample Edge data.
     */
    public static List<Edge<Long, Long>> getLongLongEdges() {
        List<Edge<Long, Long>> edges = new ArrayList<>(7);
        edges.add(new Edge<>(1L, 2L, 12L));
        edges.add(new Edge<>(1L, 3L, 13L));
        edges.add(new Edge<>(2L, 3L, 23L));
        edges.add(new Edge<>(3L, 4L, 34L));
        edges.add(new Edge<>(3L, 5L, 35L));
        edges.add(new Edge<>(4L, 5L, 45L));
        edges.add(new Edge<>(5L, 1L, 51L));
        return edges;
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestReverse.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestReverse.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
public class TestReverse extends AbstractTestBase {

    /** reverse() must swap source and target of every edge while keeping its value. */
    @Test
    public void testProgram() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "2,1,12\n" +
                "3,1,13\n" +
                "3,2,23\n" +
                "4,3,34\n" +
                "5,3,35\n" +
                "5,4,45\n" +
                "1,5,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.reverse()
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestUndirected.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestUndirected.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
public class TestUndirected extends AbstractTestBase {

    /** undirected() must emit every sample edge in both directions with the same value. */
    @Test
    public void testProgram() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,12\n" +
                "2,1,12\n" +
                "1,3,13\n" +
                "3,1,13\n" +
                "2,3,23\n" +
                "3,2,23\n" +
                "3,4,34\n" +
                "4,3,34\n" +
                "3,5,35\n" +
                "5,3,35\n" +
                "4,5,45\n" +
                "5,4,45\n" +
                "5,1,51\n" +
                "1,5,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.undirected()
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestFilterVertices.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestFilterVertices.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
public class TestFilterVertices extends AbstractTestBase {

    /** Keeping only vertices with id > 1 must drop every edge touching vertex 1. */
    @Test
    public void testWithSimpleFilter() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.filterVertices(new LowVertexKeyFilter())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** An always-true vertex filter must leave the edge stream untouched. */
    @Test
    public void testWithEmptyFilter() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,12\n" +
                "1,3,13\n" +
                "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n" +
                "5,1,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.filterVertices(new EmptyFilter())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** An always-false vertex filter must discard every edge. */
    @Test
    public void testWithDiscardFilter() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.filterVertices(new DiscardFilter())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Accepts only vertices whose id is strictly greater than 1. */
    private static final class LowVertexKeyFilter implements FilterFunction<Vertex<Long, NullValue>> {
        @Override
        public boolean filter(Vertex<Long, NullValue> vertex) throws Exception {
            return vertex.getId() > 1;
        }
    }

    /** Accepts every vertex. */
    private static final class EmptyFilter implements FilterFunction<Vertex<Long, NullValue>> {
        @Override
        public boolean filter(Vertex<Long, NullValue> vertex) throws Exception {
            return true;
        }
    }

    /** Rejects every vertex. */
    private static final class DiscardFilter implements FilterFunction<Vertex<Long, NullValue>> {
        @Override
        public boolean filter(Vertex<Long, NullValue> vertex) throws Exception {
            return false;
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestMapEdges.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestMapEdges.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
public class TestMapEdges extends AbstractTestBase {

    /** Mapping each edge value to value + 1 keeps the edge value type unchanged. */
    @Test
    public void testWithSameType() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,13\n" +
                "1,3,14\n" +
                "2,3,24\n" +
                "3,4,35\n" +
                "3,5,36\n" +
                "4,5,46\n" +
                "5,1,52\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.mapEdges(new AddOneMapper())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Mapping to (value, value + 1) changes the edge value type to a tuple. */
    @Test
    public void testWithTupleType() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,(12,13)\n" +
                "1,3,(13,14)\n" +
                "2,3,(23,24)\n" +
                "3,4,(34,35)\n" +
                "3,5,(35,36)\n" +
                "4,5,(45,46)\n" +
                "5,1,(51,52)\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.mapEdges(new ToTuple2Mapper())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Two chained maps: first increment the value, then expand it into a tuple. */
    @Test
    public void testChainedMaps() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,(13,14)\n" +
                "1,3,(14,15)\n" +
                "2,3,(24,25)\n" +
                "3,4,(35,36)\n" +
                "3,5,(36,37)\n" +
                "4,5,(46,47)\n" +
                "5,1,(52,53)\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.mapEdges(new AddOneMapper())
                .mapEdges(new ToTuple2Mapper())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Increments the edge value by one. */
    private static final class AddOneMapper implements MapFunction<Edge<Long, Long>, Long> {
        @Override
        public Long map(Edge<Long, Long> edge) throws Exception {
            return edge.getValue() + 1;
        }
    }

    /** Expands the edge value v into the tuple (v, v + 1). */
    private static final class ToTuple2Mapper implements MapFunction<Edge<Long, Long>, Tuple2<Long, Long>> {
        @Override
        public Tuple2<Long, Long> map(Edge<Long, Long> edge) throws Exception {
            return new Tuple2<>(edge.getValue(), edge.getValue() + 1);
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestUnion.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestUnion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
public class TestUnion extends AbstractTestBase {

    /** union() must emit the edges of both input streams exactly once each. */
    @Test
    public void testProgram() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,12\n" +
                "1,3,13\n" +
                "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n" +
                "5,1,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // First four sample edges feed one stream, the remaining three the other.
        List<Edge<Long, Long>> firstBatch = new ArrayList<>();
        firstBatch.add(new Edge<>(1L, 2L, 12L));
        firstBatch.add(new Edge<>(1L, 3L, 13L));
        firstBatch.add(new Edge<>(2L, 3L, 23L));
        firstBatch.add(new Edge<>(3L, 4L, 34L));

        List<Edge<Long, Long>> secondBatch = new ArrayList<>();
        secondBatch.add(new Edge<>(3L, 5L, 35L));
        secondBatch.add(new Edge<>(4L, 5L, 45L));
        secondBatch.add(new Edge<>(5L, 1L, 51L));

        SimpleEdgeStream<Long, Long> firstStream = new SimpleEdgeStream<>(env.fromCollection(firstBatch), env);
        SimpleEdgeStream<Long, Long> secondStream = new SimpleEdgeStream<>(env.fromCollection(secondBatch), env);

        GraphStream<Long, NullValue, Long> combined = firstStream.union(secondStream);
        combined.getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestNumberOfEntities.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestNumberOfEntities.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
public class TestNumberOfEntities extends AbstractTestBase {

    /** The vertex counter must emit a running count 1..5 for the sample graph. */
    @Test
    public void testNumberOfVertices() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1\n" +
                "2\n" +
                "3\n" +
                "4\n" +
                "5\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.numberOfVertices()
                .map(new WrapInTuple1())
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** The edge counter must emit a running count 1..7 for the sample graph. */
    @Test
    public void testNumberOfEdges() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1\n" +
                "2\n" +
                "3\n" +
                "4\n" +
                "5\n" +
                "6\n" +
                "7\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.numberOfEdges()
                .map(new WrapInTuple1())
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Wraps each count in a Tuple1 so the stream can be written with writeAsCsv. */
    private static final class WrapInTuple1 implements MapFunction<Long, Tuple1<Long>> {
        @Override
        public Tuple1<Long> map(Long value) throws Exception {
            return new Tuple1<>(value);
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestFilterEdges.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestFilterEdges.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
public class TestFilterEdges extends AbstractTestBase {

    /** Keeping only edges with value > 20 must drop the two edges out of vertex 1. */
    @Test
    public void testWithSimpleFilter() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n" +
                "5,1,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.filterEdges(new LowEdgeValueFilter())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** An always-true edge filter must leave the edge stream untouched. */
    @Test
    public void testWithEmptyFilter() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,2,12\n" +
                "1,3,13\n" +
                "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n" +
                "5,1,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.filterEdges(new EmptyFilter())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** An always-false edge filter must discard every edge. */
    @Test
    public void testWithDiscardFilter() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        GraphStream<Long, NullValue, Long> sampleGraph =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);

        sampleGraph.filterEdges(new DiscardFilter())
                .getEdges()
                .writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Accepts only edges whose value is strictly greater than 20. */
    private static final class LowEdgeValueFilter implements FilterFunction<Edge<Long, Long>> {
        @Override
        public boolean filter(Edge<Long, Long> edge) throws Exception {
            return edge.getValue() > 20;
        }
    }

    /** Accepts every edge. */
    private static final class EmptyFilter implements FilterFunction<Edge<Long, Long>> {
        @Override
        public boolean filter(Edge<Long, Long> edge) throws Exception {
            return true;
        }
    }

    /** Rejects every edge. */
    private static final class DiscardFilter implements FilterFunction<Edge<Long, Long>> {
        @Override
        public boolean filter(Edge<Long, Long> edge) throws Exception {
            return false;
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestGetVertices.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestGetVertices.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
/**
 * Integration test for {@code GraphStream#getVertices()}.
 */
public class TestGetVertices extends AbstractTestBase {

    /** getVertices() on the sample graph emits each vertex id once, paired with a null value. */
    @Test
    public void test() throws Exception {
        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        final String outputPath = getTempDirPath("result");
        final String expected = "1,(null)\n" +
                "2,(null)\n" +
                "3,(null)\n" +
                "4,(null)\n" +
                "5,(null)\n";

        GraphStream<Long, NullValue, Long> edgeStream =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(environment), environment);
        edgeStream.getVertices().writeAsCsv(outputPath, FileSystem.WriteMode.OVERWRITE);
        environment.execute();

        compareResultsByLinesInMemory(expected, outputPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestSlice.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestSlice.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import java.util.concurrent.TimeUnit;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.EdgeDirection;
import org.apache.flink.graph.streaming.EdgesApply;
import org.apache.flink.graph.streaming.EdgesFold;
import org.apache.flink.graph.streaming.EdgesReduce;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.util.Collector;
import org.junit.Test;
/**
 * Integration tests for neighborhood aggregations on a sliced edge stream:
 * {@code foldNeighbors}, {@code reduceOnEdges} and {@code applyOnNeighbors},
 * each exercised with the default (OUT), IN and ALL edge directions on the
 * sample graph. Results are written to CSV and compared line-by-line.
 *
 * <p>Fixes: lowercase long literals ({@code 0l}) replaced with {@code 0L}
 * (easily misread as {@code 01}), and a missing {@code @Override} added on
 * {@link SumEdgeValues#foldEdges}.
 */
public class TestSlice extends AbstractTestBase {

    /** Sum of outgoing edge values per vertex (default direction is OUT). */
    @Test
    public void testFoldNeighborsDefault() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,25\n" +
                "2,23\n" +
                "3,69\n" +
                "4,45\n" +
                "5,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, Long>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS))
                .foldNeighbors(new Tuple2<Long, Long>(0L, 0L), new SumEdgeValues());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Sum of incoming edge values per vertex. */
    @Test
    public void testFoldNeighborsIn() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,51\n" +
                "2,12\n" +
                "3,36\n" +
                "4,34\n" +
                "5,80\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, Long>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS), EdgeDirection.IN)
                .foldNeighbors(new Tuple2<Long, Long>(0L, 0L), new SumEdgeValues());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Sum over both incoming and outgoing edge values per vertex. */
    @Test
    public void testFoldNeighborsAll() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,76\n" +
                "2,35\n" +
                "3,105\n" +
                "4,79\n" +
                "5,131\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, Long>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS), EdgeDirection.ALL)
                .foldNeighbors(new Tuple2<Long, Long>(0L, 0L), new SumEdgeValues());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** reduceOnEdges with the default (OUT) direction; same expected sums as the fold variant. */
    @Test
    public void testReduceOnNeighborsDefault() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,25\n" +
                "2,23\n" +
                "3,69\n" +
                "4,45\n" +
                "5,51\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, Long>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS))
                .reduceOnEdges(new SumEdgeValuesReduce());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** reduceOnEdges over incoming edges. */
    @Test
    public void testReduceOnNeighborsIn() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,51\n" +
                "2,12\n" +
                "3,36\n" +
                "4,34\n" +
                "5,80\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, Long>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS), EdgeDirection.IN)
                .reduceOnEdges(new SumEdgeValuesReduce());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** reduceOnEdges over both directions. */
    @Test
    public void testReduceOnNeighborsAll() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,76\n" +
                "2,35\n" +
                "3,105\n" +
                "4,79\n" +
                "5,131\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, Long>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS), EdgeDirection.ALL)
                .reduceOnEdges(new SumEdgeValuesReduce());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** applyOnNeighbors classifies each vertex as "big"/"small" by its OUT-edge value sum. */
    @Test
    public void testApplyOnNeighborsDefault() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,small\n" +
                "2,small\n" +
                "3,big\n" +
                "4,small\n" +
                "5,big\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, String>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS))
                .applyOnNeighbors(new SumEdgeValuesApply());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** applyOnNeighbors classification over incoming edges. */
    @Test
    public void testApplyOnNeighborsIn() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,big\n" +
                "2,small\n" +
                "3,small\n" +
                "4,small\n" +
                "5,big\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, String>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS), EdgeDirection.IN)
                .applyOnNeighbors(new SumEdgeValuesApply());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** applyOnNeighbors classification over both directions. */
    @Test
    public void testApplyOnNeighborsAll() throws Exception {
        final String resultPath = getTempDirPath("result");
        final String expectedResult = "1,big\n" +
                "2,small\n" +
                "3,big\n" +
                "4,big\n" +
                "5,big\n";

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SimpleEdgeStream<Long, Long> graph = new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(env), env);
        DataStream<Tuple2<Long, String>> sum = graph.slice(Time.of(1, TimeUnit.SECONDS), EdgeDirection.ALL)
                .applyOnNeighbors(new SumEdgeValuesApply());
        sum.writeAsCsv(resultPath, FileSystem.WriteMode.OVERWRITE);
        env.execute();

        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /** Folds edge values into a (vertexId, runningSum) accumulator. */
    @SuppressWarnings("serial")
    private static final class SumEdgeValues implements EdgesFold<Long, Long, Tuple2<Long, Long>> {
        @Override
        public Tuple2<Long, Long> foldEdges(Tuple2<Long, Long> accum, Long id, Long neighborID, Long edgeValue) {
            accum.setField(id, 0);
            accum.setField(accum.f1 + edgeValue, 1);
            return accum;
        }
    }

    /** Combines two edge values by addition. */
    @SuppressWarnings("serial")
    private static final class SumEdgeValuesReduce implements EdgesReduce<Long> {
        @Override
        public Long reduceEdges(Long firstEdgeValue, Long secondEdgeValue) {
            return firstEdgeValue + secondEdgeValue;
        }
    }

    /** Emits ("big"/"small") per vertex, depending on whether its edge-value sum exceeds 50. */
    @SuppressWarnings("serial")
    private static final class SumEdgeValuesApply implements EdgesApply<Long, Long, Tuple2<Long, String>> {
        @Override
        public void applyOnEdges(Long vertexID,
                Iterable<Tuple2<Long, Long>> neighbors, Collector<Tuple2<Long, String>> out) {
            long sum = 0;
            for (Tuple2<Long, Long> n : neighbors) {
                sum += n.f1;
            }
            if (sum > 50) {
                out.collect(new Tuple2<Long, String>(vertexID, "big"));
            }
            else {
                out.collect(new Tuple2<Long, String>(vertexID, "small"));
            }
        }
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestGetDegrees.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestGetDegrees.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
/**
 * Integration tests for the running degree streams of {@code GraphStream}:
 * {@code getDegrees()}, {@code getInDegrees()} and {@code getOutDegrees()}.
 * Each degree update is emitted as a (vertexId, currentDegree) record.
 */
public class TestGetDegrees extends AbstractTestBase {

    /** Total degree stream: every in- or out-edge arrival bumps the vertex's degree. */
    @Test
    public void testGetDegrees() throws Exception {
        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        final String outputPath = getTempDirPath("result");
        final String expected = "1,1\n" +
                "1,2\n" +
                "1,3\n" +
                "2,1\n" +
                "2,2\n" +
                "3,1\n" +
                "3,2\n" +
                "3,3\n" +
                "3,4\n" +
                "4,1\n" +
                "4,2\n" +
                "5,1\n" +
                "5,2\n" +
                "5,3\n";

        GraphStream<Long, NullValue, Long> edgeStream =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(environment), environment);
        edgeStream.getDegrees().writeAsCsv(outputPath, FileSystem.WriteMode.OVERWRITE);
        environment.execute();

        compareResultsByLinesInMemory(expected, outputPath);
    }

    /** In-degree stream: only edge targets are counted. */
    @Test
    public void testGetInDegrees() throws Exception {
        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        final String outputPath = getTempDirPath("result");
        final String expected = "1,1\n" +
                "2,1\n" +
                "3,1\n" +
                "3,2\n" +
                "4,1\n" +
                "5,1\n" +
                "5,2\n";

        GraphStream<Long, NullValue, Long> edgeStream =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(environment), environment);
        edgeStream.getInDegrees().writeAsCsv(outputPath, FileSystem.WriteMode.OVERWRITE);
        environment.execute();

        compareResultsByLinesInMemory(expected, outputPath);
    }

    /** Out-degree stream: only edge sources are counted. */
    @Test
    public void testGetOutDegrees() throws Exception {
        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        final String outputPath = getTempDirPath("result");
        final String expected = "1,1\n" +
                "1,2\n" +
                "2,1\n" +
                "3,1\n" +
                "3,2\n" +
                "4,1\n" +
                "5,1\n";

        GraphStream<Long, NullValue, Long> edgeStream =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(environment), environment);
        edgeStream.getOutDegrees().writeAsCsv(outputPath, FileSystem.WriteMode.OVERWRITE);
        environment.execute();

        compareResultsByLinesInMemory(expected, outputPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestGraphStreamCreation.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestGraphStreamCreation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
/**
 * Integration test for constructing a {@code SimpleEdgeStream} from an edge
 * {@code DataStream} and reading the edges back via {@code getEdges()}.
 */
public class TestGraphStreamCreation extends AbstractTestBase {

    /** The created stream must emit exactly the sample edges it was built from. */
    @Test
    public void testProgram() throws Exception {
        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        final String outputPath = getTempDirPath("result");
        final String expected = "1,2,12\n" +
                "1,3,13\n" +
                "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n" +
                "5,1,51\n";

        GraphStream<Long, NullValue, Long> edgeStream =
                new SimpleEdgeStream<>(GraphStreamTestUtils.getLongLongEdgeDataStream(environment), environment);
        edgeStream.getEdges().writeAsCsv(outputPath, FileSystem.WriteMode.OVERWRITE);
        environment.execute();

        compareResultsByLinesInMemory(expected, outputPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
vasia/gelly-streaming | https://github.com/vasia/gelly-streaming/blob/76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9/src/test/java/org/apache/flink/graph/streaming/test/operations/TestDistinct.java | src/test/java/org/apache/flink/graph/streaming/test/operations/TestDistinct.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.streaming.test.operations;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.streaming.GraphStream;
import org.apache.flink.graph.streaming.SimpleEdgeStream;
import org.apache.flink.graph.streaming.test.GraphStreamTestUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.test.util.AbstractTestBase;
import org.apache.flink.types.NullValue;
import org.junit.Test;
import java.util.List;
/**
 * Integration test for {@code GraphStream#distinct()}.
 */
public class TestDistinct extends AbstractTestBase {

    /** Feeding every sample edge twice must still yield each edge exactly once. */
    @Test
    public void test() throws Exception {
        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        final String outputPath = getTempDirPath("result");
        final String expected = "1,2,12\n" +
                "1,3,13\n" +
                "2,3,23\n" +
                "3,4,34\n" +
                "3,5,35\n" +
                "4,5,45\n" +
                "5,1,51\n";

        // Duplicate the sample edge list so distinct() has duplicates to remove.
        List<Edge<Long, Long>> duplicatedEdges = GraphStreamTestUtils.getLongLongEdges();
        duplicatedEdges.addAll(GraphStreamTestUtils.getLongLongEdges());

        GraphStream<Long, NullValue, Long> edgeStream =
                new SimpleEdgeStream<>(environment.fromCollection(duplicatedEdges), environment);
        edgeStream.distinct()
                .getEdges()
                .writeAsCsv(outputPath, FileSystem.WriteMode.OVERWRITE);
        environment.execute();

        compareResultsByLinesInMemory(expected, outputPath);
    }
}
| java | Apache-2.0 | 76fb3f7d8bac7ff1bc96dd6110dac93f72692cf9 | 2026-01-05T02:40:57.674434Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.