repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/domain/impl/id/AnnotatedFieldIdPropertyResolver.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/domain/impl/id/AnnotatedFieldIdPropertyResolver.java | package com.mmnaseri.utils.spring.data.domain.impl.id;
import com.mmnaseri.utils.spring.data.domain.IdPropertyResolver;
import com.mmnaseri.utils.spring.data.error.MultipleIdPropertiesException;
import com.mmnaseri.utils.spring.data.error.PropertyTypeMismatchException;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.springframework.data.annotation.Id;
import org.springframework.util.ReflectionUtils;
import java.lang.reflect.Field;
import java.util.concurrent.atomic.AtomicReference;
import static com.mmnaseri.utils.spring.data.domain.impl.id.IdPropertyResolverUtils.isAnnotated;
/**
* This class will help resolve ID property name if the entity has a field that is annotated with
* {@link Id @Id}
*
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (9/23/15)
*/
@SuppressWarnings("WeakerAccess")
public class AnnotatedFieldIdPropertyResolver implements IdPropertyResolver {

  /**
   * Scans the entity's fields for one annotated with {@code @Id} and returns its name.
   *
   * @param entityType the entity class whose fields are scanned
   * @param idType the type the ID property is expected to have
   * @return the name of the annotated field, or {@literal null} if no annotated field exists
   */
  @Override
  public String resolve(final Class<?> entityType, Class<?> idType) {
    final Field[] holder = new Field[1];
    // visit every field of the entity type, remembering the (single) @Id-annotated one
    ReflectionUtils.doWithFields(
        entityType,
        field -> {
          if (!isAnnotated(field)) {
            return;
          }
          if (holder[0] != null) {
            // two annotated fields make the ID ambiguous, so we bail out
            throw new MultipleIdPropertiesException(entityType);
          }
          holder[0] = field;
        });
    final Field idField = holder[0];
    if (idField == null) {
      return null;
    }
    // the declared field type must be compatible with the ID type the repository promises
    if (!PropertyUtils.getTypeOf(idType)
        .isAssignableFrom(PropertyUtils.getTypeOf(idField.getType()))) {
      throw new PropertyTypeMismatchException(
          entityType, idField.getName(), idType, idField.getType());
    }
    return idField.getName();
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/domain/impl/id/EntityIdPropertyResolver.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/domain/impl/id/EntityIdPropertyResolver.java | package com.mmnaseri.utils.spring.data.domain.impl.id;
import com.mmnaseri.utils.spring.data.domain.IdPropertyResolver;
import com.mmnaseri.utils.spring.data.error.NoIdPropertyException;
import com.mmnaseri.utils.spring.data.error.PrimitiveIdTypeException;
import com.mmnaseri.utils.spring.data.query.PropertyDescriptor;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import com.mmnaseri.utils.spring.data.tools.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This class will use all the magic implemented in the other ID property resolvers to find out the
* ID property for an entity.
*
* <p>The order in which conditions are considered is:
*
* <ol>
* <li>{@link AnnotatedGetterIdPropertyResolver Annotated getter}
* <li>{@link AnnotatedFieldIdPropertyResolver Annotated field}
* <li>{@link NamedGetterIdPropertyResolver Getter for ID property using name}
* <li>{@link NamedFieldIdPropertyResolver Field having proper name}
* </ol>
*
* <p>After all the above are considered, if nothing is found, a {@link NoIdPropertyException
* NoIdPropertyException} is thrown to show that the promised ID property was not found on the
* entity class.
*
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (9/23/15)
*/
public class EntityIdPropertyResolver implements IdPropertyResolver {

  private static final Log log = LogFactory.getLog(EntityIdPropertyResolver.class);
  private final AnnotatedGetterIdPropertyResolver annotatedGetterIdPropertyResolver =
      new AnnotatedGetterIdPropertyResolver();
  private final AnnotatedFieldIdPropertyResolver annotatedFieldIdPropertyResolver =
      new AnnotatedFieldIdPropertyResolver();
  private final NamedGetterIdPropertyResolver namedGetterIdPropertyResolver =
      new NamedGetterIdPropertyResolver();
  private final NamedFieldIdPropertyResolver namedFieldIdPropertyResolver =
      new NamedFieldIdPropertyResolver();

  /**
   * Resolves the ID property of the given entity by trying each strategy in turn: annotated
   * getter, annotated field, conventionally named getter, and finally a conventionally named
   * field.
   *
   * @param entityType the entity whose ID property is sought
   * @param idType the type the ID property is expected to have
   * @return the name of the resolved ID property (never {@literal null})
   */
  @Override
  public String resolve(Class<?> entityType, Class<?> idType) {
    String idProperty =
        attempt(annotatedGetterIdPropertyResolver, "annotated getter method", entityType, idType);
    if (idProperty == null) {
      idProperty =
          attempt(annotatedFieldIdPropertyResolver, "annotated ID field", entityType, idType);
    }
    if (idProperty == null) {
      idProperty = attempt(namedGetterIdPropertyResolver, "getter method", entityType, idType);
    }
    if (idProperty == null) {
      idProperty = attempt(namedFieldIdPropertyResolver, "field", entityType, idType);
    }
    if (idProperty == null) {
      log.error("No ID property was found for entity " + entityType);
      throw new NoIdPropertyException(entityType);
    }
    // primitive ID types are not supported and are rejected here
    final PropertyDescriptor descriptor =
        PropertyUtils.getPropertyDescriptor(entityType, StringUtils.capitalize(idProperty));
    if (descriptor.getType().isPrimitive()) {
      throw new PrimitiveIdTypeException(entityType, idProperty);
    }
    return idProperty;
  }

  /** Logs the strategy being attempted and delegates resolution to the given resolver. */
  private String attempt(
      IdPropertyResolver resolver, String strategy, Class<?> entityType, Class<?> idType) {
    log.info(
        "Trying to resolve the ID property for entity " + entityType + " using the " + strategy);
    return resolver.resolve(entityType, idType);
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultQueryDslPredicateExecutor.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultQueryDslPredicateExecutor.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.DataStoreAware;
import com.mmnaseri.utils.spring.data.store.DataStore;
import com.querydsl.collections.CollQuery;
import com.querydsl.collections.CollQueryFactory;
import com.querydsl.core.alias.Alias;
import com.querydsl.core.types.OrderSpecifier;
import com.querydsl.core.types.Predicate;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (4/28/16)
*/
public class DefaultQueryDslPredicateExecutor extends PagingAndSortingSupport
    implements DataStoreAware {

  private DataStore dataStore;
  private Object alias;

  /** Starts a collection-backed Querydsl query over everything currently in the data store. */
  private CollQuery query() {
    return (CollQuery) CollQueryFactory.from(alias, dataStore.retrieveAll());
  }

  /** Returns the single entity matching the predicate, if any. */
  public Object findOne(Predicate predicate) {
    //noinspection unchecked
    return query().where(predicate).fetchOne();
  }

  /** Returns every entity matching the predicate. */
  public Iterable findAll(Predicate predicate) {
    //noinspection unchecked
    return query().where(predicate).fetch();
  }

  /** Returns every entity matching the predicate, sorted per the given specification. */
  public Iterable findAll(Predicate predicate, Sort sort) {
    //noinspection unchecked
    return PagingAndSortingUtils.sort(query().where(predicate).fetch(), sort);
  }

  /** Returns the requested page of entities matching the predicate. */
  public Page findAll(Predicate predicate, Pageable pageable) {
    //noinspection unchecked
    return page(query().where(predicate).fetch(), pageable);
  }

  /** Counts the entities matching the predicate. */
  public long count(Predicate predicate) {
    //noinspection unchecked
    return query().where(predicate).fetchCount();
  }

  /** Checks whether at least one entity matches the predicate. */
  public boolean exists(Predicate predicate) {
    //noinspection unchecked
    return query().where(predicate).fetchCount() > 0;
  }

  /** Returns every entity, ordered by the given order specifiers. */
  public Iterable findAll(OrderSpecifier... orders) {
    //noinspection unchecked
    return query().orderBy(orders).fetch();
  }

  /** Returns every entity matching the predicate, ordered by the given order specifiers. */
  public Iterable findAll(Predicate predicate, OrderSpecifier... orders) {
    //noinspection unchecked
    return query().where(predicate).orderBy(orders).fetch();
  }

  /** Binds the data store and creates the Querydsl alias for the store's bound entity type. */
  @Override
  public void setDataStore(DataStore dataStore) {
    this.dataStore = dataStore;
    this.alias = Alias.alias(dataStore.getEntityType());
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/ExampleMatcherQueryDescriptionExtractor.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/ExampleMatcherQueryDescriptionExtractor.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.Modifier;
import com.mmnaseri.utils.spring.data.domain.Operator;
import com.mmnaseri.utils.spring.data.domain.OperatorContext;
import com.mmnaseri.utils.spring.data.domain.Parameter;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadata;
import com.mmnaseri.utils.spring.data.domain.impl.ImmutableParameter;
import com.mmnaseri.utils.spring.data.domain.impl.QueryDescriptionExtractor;
import com.mmnaseri.utils.spring.data.proxy.RepositoryFactoryConfiguration;
import com.mmnaseri.utils.spring.data.query.QueryDescriptor;
import com.mmnaseri.utils.spring.data.query.impl.DefaultQueryDescriptor;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher;
import java.beans.PropertyDescriptor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/8/16, 12:45 PM)
*/
public class ExampleMatcherQueryDescriptionExtractor implements QueryDescriptionExtractor<Example> {
// Translates a query-by-example probe + matcher into this library's QueryDescriptor format.
// Every non-ignored property of the probe becomes one query Parameter; all parameters end up in
// a single branch, i.e. an AND-only condition (query-by-example supports no OR -- see the note
// in the sibling DefaultQueryByExampleExecutor).
@Override
public QueryDescriptor extract(
RepositoryMetadata repositoryMetadata,
RepositoryFactoryConfiguration configuration,
Example example) {
final OperatorContext operatorContext =
configuration.getDescriptionExtractor().getOperatorContext();
// flatten the probe object into "property.path" -> value pairs
final Map<String, Object> values = extractValues(example.getProbe());
final ExampleMatcher matcher = example.getMatcher();
final List<Parameter> parameters = new ArrayList<>();
int index = 0;
for (Map.Entry<String, Object> entry : values.entrySet()) {
final String propertyPath = entry.getKey();
if (matcher.isIgnoredPath(propertyPath)) {
continue;
}
final Set<Modifier> modifiers = new HashSet<>();
final Operator operator;
if (entry.getValue() == null) {
// null probe values are either skipped or turned into an IsNull check, depending on the
// matcher's null handling policy
if (ExampleMatcher.NullHandler.IGNORE.equals(matcher.getNullHandler())) {
continue;
} else {
operator = operatorContext.getBySuffix("IsNull");
}
} else {
if (ignoreCase(matcher, propertyPath)) {
modifiers.add(Modifier.IGNORE_CASE);
}
// map the matcher's string matching mode onto the corresponding query operator; any other
// mode (including a possibly-null default, per the suppressions below) falls back to "Is"
final ExampleMatcher.StringMatcher stringMatcher = stringMatcher(matcher, propertyPath);
if (ExampleMatcher.StringMatcher.STARTING.equals(stringMatcher)) {
operator = operatorContext.getBySuffix("StartsWith");
} else if (ExampleMatcher.StringMatcher.ENDING.equals(stringMatcher)) {
operator = operatorContext.getBySuffix("EndsWith");
} else if (ExampleMatcher.StringMatcher.CONTAINING.equals(stringMatcher)) {
operator = operatorContext.getBySuffix("Contains");
} else if (ExampleMatcher.StringMatcher.REGEX.equals(stringMatcher)) {
operator = operatorContext.getBySuffix("Matches");
} else {
operator = operatorContext.getBySuffix("Is");
}
}
// each parameter consumes exactly one positional argument slot, in iteration order
parameters.add(
new ImmutableParameter(propertyPath, modifiers, new int[] {index++}, operator));
}
// a single branch: all conditions are AND-ed together
return new DefaultQueryDescriptor(
false,
null,
0,
null,
null,
Collections.singletonList(parameters),
configuration,
repositoryMetadata);
}
/**
* Determines the string matching mode for the given property path: a path-specific specifier
* takes precedence over the matcher-wide default.
*/
private ExampleMatcher.StringMatcher stringMatcher(ExampleMatcher matcher, String path) {
final ExampleMatcher.PropertySpecifier specifier =
matcher.getPropertySpecifiers().getForPath(path);
//noinspection ConstantConditions
return specifier != null ? specifier.getStringMatcher() : matcher.getDefaultStringMatcher();
}
/**
* Checks whether case should be ignored for the given property path, either via the matcher-wide
* flag or a path-specific specifier.
*/
private boolean ignoreCase(ExampleMatcher matcher, String path) {
final ExampleMatcher.PropertySpecifier specifier =
matcher.getPropertySpecifiers().getForPath(path);
//noinspection ConstantConditions
return matcher.isIgnoreCaseEnabled()
|| specifier != null && Boolean.TRUE.equals(specifier.getIgnoreCase());
}
/**
* Given an input object, this method will return a map from the property paths to their
* corresponding values
*
* @param object the input object
* @return the map of values
*/
private Map<String, Object> extractValues(Object object) {
final Map<String, Object> result = new HashMap<>();
final BeanWrapper wrapper = new BeanWrapperImpl(object);
for (PropertyDescriptor descriptor : wrapper.getPropertyDescriptors()) {
// only consider properties with both a getter and a setter (this skips e.g. getClass())
if (descriptor.getReadMethod() == null || descriptor.getWriteMethod() == null) {
continue;
}
final String propertyName = descriptor.getName();
final Object value = PropertyUtils.getPropertyValue(object, propertyName);
if (value == null) {
result.put(propertyName, null);
continue;
}
if (isIntractable(descriptor, value)) {
result.put(propertyName, value);
continue;
}
// recurse into nested beans and prefix their paths with this property's name
final Map<String, Object> children = extractValues(value);
for (Map.Entry<String, Object> entry : children.entrySet()) {
result.put(propertyName + "." + entry.getKey(), entry.getValue());
}
}
return result;
}
/**
* This method is used to determine if a given value should be broken down further or should it be
* passed in as it is
*
* @param descriptor the descriptor for the property
* @param value the value for the property
* @return {@literal true} if the value should be left alone
*/
private boolean isIntractable(PropertyDescriptor descriptor, Object value) {
// primitives, core JDK types (java.lang.*), and container values are treated as leaf values
final Class<?> type = descriptor.getPropertyType();
return type.isPrimitive()
|| type.getName().startsWith("java.lang.")
|| value instanceof Iterable
|| value instanceof Map;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/CrudRepositorySupport.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/CrudRepositorySupport.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.DataStoreAware;
import com.mmnaseri.utils.spring.data.domain.KeyGenerator;
import com.mmnaseri.utils.spring.data.domain.KeyGeneratorAware;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadata;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadataAware;
import com.mmnaseri.utils.spring.data.store.DataStore;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.LinkedList;
import java.util.List;
/**
* This implementation is used to factor out the commonalities between various Spring interfaces
* extending the {@link org.springframework.data.repository.CrudRepository} interface.
*
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (2015/11/09, 21:40)
*/
@SuppressWarnings({"WeakerAccess", "unused"})
public class CrudRepositorySupport
    implements DataStoreAware, RepositoryMetadataAware, KeyGeneratorAware<Object> {

  private static final Log log = LogFactory.getLog(CrudRepositorySupport.class);
  private KeyGenerator<?> keyGenerator;
  private DataStore dataStore;
  private RepositoryMetadata repositoryMetadata;

  protected CrudRepositorySupport() {}

  /**
   * Stores the given entity in the underlying data store. If the entity has no key and a key
   * generator is configured, a fresh key is generated and written onto the entity first.
   *
   * @param entity the entity to save (an {@link Iterable} is delegated to the bulk overload)
   * @return the same entity instance, carrying a key if one was generated
   */
  public Object save(Object entity) {
    if (entity instanceof Iterable) {
      return save((Iterable) entity);
    }
    final String idProperty = repositoryMetadata.getIdentifierProperty();
    Object key = PropertyUtils.getPropertyValue(entity, idProperty);
    log.info("The entity that is to be saved has a key with value " + key);
    if (key == null && keyGenerator != null) {
      log.info(
          "The key was null, but the generator was not, so we are going to get a key for the entity");
      key = keyGenerator.generate();
      log.debug("The generated key for the entity was " + key);
      PropertyUtils.setPropertyValue(entity, idProperty, key);
    }
    if (key == null) {
      // no key and no generator: the store may reject this, but we leave the decision to it
      log.warn(
          "Attempting to save an entity without a key. This might result in an error. To fix this, specify "
              + "a key generator.");
    }
    //noinspection unchecked
    dataStore.save(key, entity);
    return entity;
  }

  /**
   * Stores every entity in the given iterable, one at a time, via {@link #save(Object)}.
   *
   * @param entities the entities to save (insert or update)
   * @return the saved entities, in iteration order
   */
  public Iterable<Object> save(Iterable entities) {
    log.info("Going to save a number of entities in the underlying data store");
    log.debug(entities);
    final List<Object> saved = new LinkedList<>();
    for (Object entity : entities) {
      saved.add(save(entity));
    }
    return saved;
  }

  /**
   * Alias for {@link #save(Object)}, for repository interfaces that expose an insert method.
   *
   * @param entity the entity to be inserted
   * @return the saved entity, carrying a key if one was generated
   */
  public Object insert(Object entity) {
    return save(entity);
  }

  protected KeyGenerator<?> getKeyGenerator() {
    return keyGenerator;
  }

  @Override
  public final void setKeyGenerator(KeyGenerator<?> keyGenerator) {
    this.keyGenerator = keyGenerator;
  }

  protected DataStore getDataStore() {
    return dataStore;
  }

  @Override
  public final void setDataStore(DataStore dataStore) {
    this.dataStore = dataStore;
  }

  protected RepositoryMetadata getRepositoryMetadata() {
    return repositoryMetadata;
  }

  @Override
  public final void setRepositoryMetadata(RepositoryMetadata repositoryMetadata) {
    this.repositoryMetadata = repositoryMetadata;
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultJpaRepository.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultJpaRepository.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.error.EntityMissingKeyException;
import com.mmnaseri.utils.spring.data.store.DataStore;
import com.mmnaseri.utils.spring.data.store.QueueingDataStore;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (10/13/15)
*/
@SuppressWarnings({"unchecked", "WeakerAccess", "UnusedReturnValue"})
public class DefaultJpaRepository extends CrudRepositorySupport {
private static final Log log = LogFactory.getLog(DefaultJpaRepository.class);
/**
* If the underlying data store supports {@link QueueingDataStore queueing} and needs the queue to
* be flushed, this method will flush the queue. Otherwise, it will not do anything
*/
public void flush() {
final DataStore dataStore = getDataStore();
if (dataStore instanceof QueueingDataStore) {
log.info("Asking the data store to flush the underlying operation queue");
final QueueingDataStore store = (QueueingDataStore) dataStore;
store.flush();
}
}
/**
* Deletes the given entities by enclosing the actual delete in batch requests. If the underlying
* data store doesn't support {@link QueueingDataStore queueing}, this will be no different than
* simply sequentially deleting all the entities.
*
* @param entities entities to delete
* @return deleted entities
* @throws EntityMissingKeyException if any of the given entities has no key set
*/
public Iterable deleteInBatch(Iterable entities) {
// collect all keys up front so a missing key aborts the operation before anything is deleted
final List<Object> keys = new LinkedList<>();
for (Object entity : entities) {
final Object key =
PropertyUtils.getPropertyValue(entity, getRepositoryMetadata().getIdentifierProperty());
if (key == null) {
log.error("There is no key set for the entity we were trying to delete");
throw new EntityMissingKeyException(
getRepositoryMetadata().getEntityType(),
getRepositoryMetadata().getIdentifierProperty());
}
keys.add(key);
}
return deleteByKeysInBatch(keys);
}
/**
* Deletes everything in the data store that's of the bound entity type
*
* @return deleted entities
*/
public Iterable deleteAllInBatch() {
log.info("Attempting to delete all the entities in the data store in a batch");
return deleteByKeysInBatch(getDataStore().keys());
}
/**
* Deletes entities bound to the passed keys in batch
*
* @param keys the keys
* @return deleted entities
*/
private Iterable deleteByKeysInBatch(Collection<Object> keys) {
// if the store supports queueing, wrap the deletes in a batch; otherwise batch stays null and
// the deletes below happen immediately
final Object batch;
if (getDataStore() instanceof QueueingDataStore) {
log.debug("The data store support queueing, so we are going to start a batch");
batch = ((QueueingDataStore) getDataStore()).startBatch();
} else {
log.debug("The data store does not support batching, so this is the same as a normal delete");
batch = null;
}
// remember each deleted entity so the caller gets back exactly what was removed
final List result = new LinkedList();
for (Object key : keys) {
if (getDataStore().hasKey(key)) {
result.add(getDataStore().retrieve(key));
getDataStore().delete(key);
}
}
// close the batch started above; batch is non-null exactly when the store supports queueing
if (getDataStore() instanceof QueueingDataStore) {
log.debug("Ending the batch operation that was started previously.");
((QueueingDataStore) getDataStore()).endBatch(batch);
}
return result;
}
/**
* Returns the entity that has the given key
*
* @param key the key
* @return returns the entity or {@literal null} if it couldn't be found
*/
public Object getOne(Object key) {
if (getDataStore().hasKey(key)) {
log.info("Returning entity for key " + key);
return getDataStore().retrieve(key);
}
log.info("No entity was found with key " + key);
return null;
}
/**
* Saves the entity to the database and flushes the queue
*
* @param entity the entity
* @return the saved entity
*/
public Object saveAndFlush(Object entity) {
log.info("Saving entity " + entity + " to the data store.");
final Object saved = save(entity);
log.info("Going to flush the data store after the save");
flush();
return saved;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/package-info.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/package-info.java | /**
* This package implements the bridge pattern over the various repository interfaces in the Spring
* Data project. Each class should represent exactly one repository interface in the Spring data
* project.
*
 * <p>Many of the interfaces in the Spring Data project ship in their own, separate artifacts. We
 * therefore have to check that such an interface is actually present on the classpath before
 * registering the bridge class that implements it.
*
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (4/12/16, 8:47 PM)
*/
package com.mmnaseri.utils.spring.data.repository;
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/PagingAndSortingSupport.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/PagingAndSortingSupport.java | package com.mmnaseri.utils.spring.data.repository;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.Collection;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/8/16, 11:50 AM)
*/
public abstract class PagingAndSortingSupport {
/**
* Returns the requested page view of the given entries, per the supplied pagination
* specification, by delegating to {@link PagingAndSortingUtils#page(Collection, Pageable)}.
*
* @param entries the full set of matching entries
* @param pageable the pagination specification
* @return the requested page of the entries
*/
public static Page page(Collection entries, Pageable pageable) {
return PagingAndSortingUtils.page(entries, pageable);
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultGemfireRepository.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultGemfireRepository.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.DataStoreAware;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadata;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadataAware;
import com.mmnaseri.utils.spring.data.store.DataStore;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.gemfire.repository.Wrapper;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (10/13/15)
*/
public class DefaultGemfireRepository implements DataStoreAware, RepositoryMetadataAware {

  private static final Log log = LogFactory.getLog(DefaultGemfireRepository.class);
  private DataStore dataStore;
  private RepositoryMetadata repositoryMetadata;

  /**
   * Persists the entity carried by the given wrapper under the wrapper's key. When the entity is
   * an instance of the data store's bound entity type, the key is also written onto the entity
   * itself before it is stored.
   *
   * @param wrapper carries both the key and the entity to store
   * @return the stored entity
   */
  public Object save(Wrapper<Object, Object> wrapper) {
    log.info("Going to save a wrapped data store object");
    final Object entity = wrapper.getEntity();
    final Object key = wrapper.getKey();
    final boolean knownEntityType = repositoryMetadata.getEntityType().isInstance(entity);
    if (knownEntityType) {
      log.debug(
          "Since the entity is of the same type as the actual entity type supported by the data store, "
              + "we know how to set the key on the wrapped entity.");
      PropertyUtils.setPropertyValue(entity, repositoryMetadata.getIdentifierProperty(), key);
    }
    //noinspection unchecked
    dataStore.save(key, entity);
    return entity;
  }

  @Override
  public void setDataStore(DataStore dataStore) {
    this.dataStore = dataStore;
  }

  @Override
  public void setRepositoryMetadata(RepositoryMetadata repositoryMetadata) {
    this.repositoryMetadata = repositoryMetadata;
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultPagingAndSortingRepository.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultPagingAndSortingRepository.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.DataStoreAware;
import com.mmnaseri.utils.spring.data.store.DataStore;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import java.util.Collection;
import java.util.List;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (10/12/15)
*/
@SuppressWarnings("WeakerAccess")
public class DefaultPagingAndSortingRepository extends PagingAndSortingSupport
    implements DataStoreAware {

  private static final Log log = LogFactory.getLog(DefaultPagingAndSortingRepository.class);
  private DataStore dataStore;

  /**
   * Loads everything in the data store and sorts it per the given sort specification.
   *
   * @param sort how to sort the data
   * @return sorted entries, unless sort is null.
   */
  public List findAll(Sort sort) {
    log.info("Loading all the data in the data store");
    final Collection entries = dataStore.retrieveAll();
    return PagingAndSortingUtils.sort(entries, sort);
  }

  /**
   * Loads everything in the data store and returns the page described by the given spec.
   *
   * @param pageable the pagination and sort spec
   * @return the specified view of the data
   */
  public Page findAll(Pageable pageable) {
    log.info("Loading all the data in the data store");
    final Collection entries = dataStore.retrieveAll();
    return page(entries, pageable);
  }

  @Override
  public void setDataStore(DataStore dataStore) {
    this.dataStore = dataStore;
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultQueryByExampleExecutor.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultQueryByExampleExecutor.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.DataStoreAware;
import com.mmnaseri.utils.spring.data.domain.Invocation;
import com.mmnaseri.utils.spring.data.domain.Parameter;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadata;
import com.mmnaseri.utils.spring.data.domain.RepositoryMetadataAware;
import com.mmnaseri.utils.spring.data.domain.impl.ImmutableInvocation;
import com.mmnaseri.utils.spring.data.domain.impl.SelectDataStoreOperation;
import com.mmnaseri.utils.spring.data.error.InvalidArgumentException;
import com.mmnaseri.utils.spring.data.proxy.RepositoryConfiguration;
import com.mmnaseri.utils.spring.data.proxy.RepositoryConfigurationAware;
import com.mmnaseri.utils.spring.data.proxy.RepositoryFactoryConfiguration;
import com.mmnaseri.utils.spring.data.proxy.RepositoryFactoryConfigurationAware;
import com.mmnaseri.utils.spring.data.query.QueryDescriptor;
import com.mmnaseri.utils.spring.data.store.DataStore;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/8/16, 11:57 AM)
*/
public class DefaultQueryByExampleExecutor
implements DataStoreAware,
RepositoryConfigurationAware,
RepositoryMetadataAware,
RepositoryFactoryConfigurationAware {
private final ExampleMatcherQueryDescriptionExtractor queryDescriptionExtractor;
private DataStore<Object, Object> dataStore;
private RepositoryConfiguration repositoryConfiguration;
private RepositoryMetadata repositoryMetadata;
private RepositoryFactoryConfiguration configuration;
public DefaultQueryByExampleExecutor() {
// the extractor translates an Example (probe + matcher) into this library's query description
queryDescriptionExtractor = new ExampleMatcherQueryDescriptionExtractor();
}
/**
 * Returns the single entity matching the given example, or {@literal null} if none matches.
 *
 * @param example the example to match against
 * @return the single match, or {@literal null} when nothing matched
 * @throws InvalidArgumentException when more than one entity matches the example
 */
public Object findOne(Example example) {
  final Collection matches = retrieveAll(example);
  // more than one hit means the example was not selective enough
  if (matches.size() > 1) {
    throw new InvalidArgumentException(
        "Expected to see exactly one item found, but found "
            + matches.size()
            + ". You should use a better example.");
  }
  return matches.isEmpty() ? null : matches.iterator().next();
}
/** Returns every entity in the data store that matches the given example. */
public Iterable findAll(Example example) {
return retrieveAll(example);
}
/** Returns every entity matching the example, sorted per the given sort specification. */
public Iterable findAll(Example example, Sort sort) {
return PagingAndSortingUtils.sort(retrieveAll(example), sort);
}
/** Returns the requested page of entities matching the example. */
public Page findAll(Example example, Pageable pageable) {
return PagingAndSortingUtils.page(retrieveAll(example), pageable);
}
public long count(Example example) {
return retrieveAll(example).size();
}
public boolean exists(Example example) {
return count(example) > 0;
}
@Override
public void setDataStore(DataStore dataStore) {
//noinspection unchecked
this.dataStore = dataStore;
}
@Override
public void setRepositoryConfiguration(RepositoryConfiguration repositoryConfiguration) {
this.repositoryConfiguration = repositoryConfiguration;
}
@Override
public void setRepositoryMetadata(RepositoryMetadata repositoryMetadata) {
this.repositoryMetadata = repositoryMetadata;
}
@Override
public void setRepositoryFactoryConfiguration(RepositoryFactoryConfiguration configuration) {
this.configuration = configuration;
}
/**
* Retrieves all entities that match the given example
*
* @param example the example for finding the entities
* @return a collection of matched entities
*/
private Collection<?> retrieveAll(Example example) {
final QueryDescriptor descriptor =
queryDescriptionExtractor.extract(repositoryMetadata, configuration, example);
final Invocation invocation = createInvocation(descriptor, example);
final SelectDataStoreOperation<Object, Object> select =
new SelectDataStoreOperation<>(descriptor);
return select.execute(dataStore, repositoryConfiguration, invocation);
}
/**
* This method will create an invocation that had it occurred on a query method would provide
* sufficient data for a parsed query method expression to be evaluated
*
* @param descriptor the query descriptor
* @param example the example that is used for evaluation
* @return the fake method invocation corresponding to the example probe
*/
private Invocation createInvocation(QueryDescriptor descriptor, Example example) {
final List<Object> values = new ArrayList<>();
// since according to
// http://docs.spring.io/spring-data/jpa/docs/current/reference/html/#query-by-example
// the matcher only supports AND condition, so, we expect to see only one branch
final List<List<Parameter>> branches = descriptor.getBranches();
final List<Parameter> parameters = branches.get(0);
for (Parameter parameter : parameters) {
final String propertyPath = parameter.getPath();
final Object propertyValue = PropertyUtils.getPropertyValue(example.getProbe(), propertyPath);
final ExampleMatcher.PropertySpecifier specifier =
example.getMatcher().getPropertySpecifiers().getForPath(propertyPath);
//noinspection ConstantConditions
values.add(
specifier == null
? propertyValue
: specifier
.getPropertyValueTransformer()
.apply(Optional.ofNullable(propertyValue))
.orElse(null));
}
return new ImmutableInvocation(null, values.toArray());
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultCrudRepository.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/DefaultCrudRepository.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.error.EntityMissingKeyException;
import com.mmnaseri.utils.spring.data.tools.PropertyUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
/**
* This class will provide implementations for the methods introduced by the Spring framework through {@link org.springframework.data.repository.CrudRepository}.
*
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (10/6/15)
*/
@SuppressWarnings({"unchecked", "WeakerAccess", "UnusedReturnValue"})
public class DefaultCrudRepository extends CrudRepositorySupport {

  private static final Log log = LogFactory.getLog(DefaultCrudRepository.class);

  /**
   * Saves all the given entities (insert or update, delegating to the inherited save logic).
   *
   * @param entities entities to save (insert or update)
   * @return saved entities
   */
  public Iterable<Object> saveAll(Iterable entities) {
    return save(entities);
  }

  /**
   * Finds the entity that was saved with this key.
   *
   * @param key the key
   * @return an {@link Optional} holding the entity, or an empty {@link Optional} when no entity
   *     was stored under this key
   */
  public Optional<Object> findById(Object key) {
    log.info("Attempting to load the entity with key " + key);
    return Optional.ofNullable(getDataStore().retrieve(key));
  }

  /**
   * Checks whether the given key represents an entity in the data store
   *
   * @param key the key
   * @return {@literal true} if the key is valid
   */
  public boolean existsById(Object key) {
    return getDataStore().hasKey(key);
  }

  /**
   * Finds all the entities that match the given set of ids. Ids with no matching entity are
   * silently skipped.
   *
   * @param ids ids to look for
   * @return entities that matched the ids.
   */
  public Iterable findAllById(Iterable ids) {
    final List entities = new LinkedList();
    log.info("Looking for multiple entities for a number of ids");
    log.debug(ids);
    for (Object id : ids) {
      final Optional<Object> found = findById(id);
      if (found.isPresent()) {
        log.trace(
            "Entity found for key "
                + id
                + ", adding the found entity to the list of returned entity");
        entities.add(found.get());
      }
    }
    return entities;
  }

  /**
   * Deletes the entity stored under the given argument and returns it.
   *
   * <p>The argument is first tried as a raw key; if no entity is stored under it, the argument is
   * assumed to be an entity instance, its identifier property is read, and the lookup/delete is
   * retried with that key. If the argument has no identifier property, the resulting
   * IllegalStateException is swallowed and the original argument is used for the delete.
   *
   * @param entity the key of the entity to delete, or the entity itself
   * @return the entity that was stored under the resolved key, or {@literal null} if none was
   *     found
   */
  public Object deleteById(Object entity) {
    Object retrieved = getDataStore().retrieve(entity);
    log.info("Attempting to delete the entity with key " + entity);
    if (retrieved == null) {
      log.info("Object not found with key " + entity + ", try to find by identifier property");
      try {
        // fall back: treat the argument as an entity and extract its key
        entity = PropertyUtils.getPropertyValue(entity, getRepositoryMetadata().getIdentifierProperty());
        retrieved = getDataStore().retrieve(entity);
      } catch (IllegalStateException exception) {
        // the argument has no identifier property; proceed with the original value as the key
        log.info("Serialized id doesn't have a identifier property");
      }
    }
    getDataStore().delete(entity);
    return retrieved;
  }

  /**
   * Deletes the given entity by extracting its key and returns the actual entity that was just
   * deleted. (Note: despite the parameter name, this method expects an entity instance, not a raw
   * id — the key is read from the entity's identifier property.)
   *
   * @param id the entity whose key is used for deletion
   * @return the entity that was deleted or {@literal null} if it wasn't found
   * @throws EntityMissingKeyException if the passed entity doesn't have a key
   */
  public Object delete(Object id) {
    final Object key =
        PropertyUtils.getPropertyValue(id, getRepositoryMetadata().getIdentifierProperty());
    if (key == null) {
      log.error("The entity that was supposed to be deleted, does not have a key");
      throw new EntityMissingKeyException(
          getRepositoryMetadata().getEntityType(), getRepositoryMetadata().getIdentifierProperty());
    }
    return deleteById(key);
  }

  /**
   * Deletes all specified <em>entities</em> from the data store.
   *
   * @param entities the entities to delete
   * @return the entities that were actually deleted (entities not found are skipped)
   */
  public Iterable delete(Iterable entities) {
    log.info("Attempting to delete multiple entities via entity objects themselves");
    log.debug(entities);
    final List list = new LinkedList();
    for (Object entity : entities) {
      final Object deleted = deleteById(entity);
      if (deleted != null) {
        log.debug(
            "The entity was deleted successfully and will be added to the list of deleted entities");
        list.add(deleted);
      }
    }
    return list;
  }

  /**
   * Deletes everything from the data store
   *
   * @return all the entities that were removed
   */
  public Iterable deleteAll() {
    log.info("Attempting to delete all entities at once");
    final List list = new LinkedList();
    final Collection keys = getDataStore().keys();
    log.debug(
        "There are "
            + keys.size()
            + " entities altogether in the data store that are going to be deleted");
    for (Object key : keys) {
      final Object deleted = deleteById((key));
      if (deleted != null) {
        log.debug(
            "The entity was deleted successfully and will be added to the list of deleted entities");
        list.add(deleted);
      }
    }
    // only used for the diagnostic log line below; expected to be empty after a full delete
    final Collection remainingKeys = getDataStore().keys();
    log.debug(
        "There are "
            + remainingKeys.size()
            + " keys remaining in the data store after the delete operation");
    return list;
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/PagingAndSortingUtils.java | spring-data-mock/src/main/java/com/mmnaseri/utils/spring/data/repository/PagingAndSortingUtils.java | package com.mmnaseri.utils.spring.data.repository;
import com.mmnaseri.utils.spring.data.domain.impl.PropertyComparator;
import com.mmnaseri.utils.spring.data.query.NullHandling;
import com.mmnaseri.utils.spring.data.query.Order;
import com.mmnaseri.utils.spring.data.query.SortDirection;
import com.mmnaseri.utils.spring.data.query.impl.ImmutableOrder;
import com.mmnaseri.utils.spring.data.query.impl.ImmutableSort;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
/**
* @author Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (4/28/16)
*/
final class PagingAndSortingUtils {

  private static final Log log = LogFactory.getLog(PagingAndSortingUtils.class);

  private PagingAndSortingUtils() {
    // static utility holder; never meant to be instantiated
    throw new UnsupportedOperationException();
  }

  /**
   * Copies the entries into a list and sorts them according to the given Spring sort spec.
   *
   * @param entries the entries to be sorted
   * @param sort how to sort the data; may be {@literal null}
   * @return sorted entries, unless sort is null.
   */
  public static List sort(Collection entries, Sort sort) {
    //noinspection unchecked
    final List result = new LinkedList(entries);
    if (sort == null) {
      log.info("No sort was specified, so we are just going to return the data as-is");
      return result;
    }
    // translate the Spring sort orders into this project's internal order representation
    final List<Order> orders = new LinkedList<>();
    for (final Sort.Order order : sort) {
      orders.add(
          new ImmutableOrder(directionOf(order), order.getProperty(), nullHandlingOf(order)));
    }
    log.info("Sorting the retrieved data: " + orders);
    PropertyComparator.sort(result, new ImmutableSort(orders));
    return result;
  }

  /** Maps a Spring sort direction onto the internal {@link SortDirection} counterpart. */
  private static SortDirection directionOf(Sort.Order order) {
    return order.isAscending() ? SortDirection.ASCENDING : SortDirection.DESCENDING;
  }

  /** Maps Spring's null-handling hint onto the internal {@link NullHandling} counterpart. */
  private static NullHandling nullHandlingOf(Sort.Order order) {
    switch (order.getNullHandling()) {
      case NULLS_FIRST:
        return NullHandling.NULLS_FIRST;
      case NULLS_LAST:
        return NullHandling.NULLS_LAST;
      default:
        return NullHandling.DEFAULT;
    }
  }

  /**
   * Sorts the entries (when the pageable asks for it) and then carves out the requested page.
   *
   * @param entries the entries to be paged
   * @param pageable the pagination and sort spec
   * @return the specified view of the data
   */
  public static Page page(Collection entries, Pageable pageable) {
    final List<?> all;
    if (pageable.getSort().isSorted()) {
      log.info("The page specification requests sorting, so we are going to sort the data first");
      all = sort(entries, pageable.getSort());
    } else {
      log.info(
          "The page specification does not need sorting, so we are going to load the data as-is");
      //noinspection unchecked
      all = new LinkedList(entries);
    }
    final int total = all.size();
    // clamp the page window to the available data so out-of-range pages come back empty
    final int offset = Math.max(0, pageable.getPageNumber() * pageable.getPageSize());
    final int from = Math.min(offset, total);
    final int to = Math.min(offset + pageable.getPageSize(), total);
    log.info(
        "Trimming the selection down for page "
            + pageable.getPageNumber()
            + " to include items from "
            + from
            + " to "
            + to);
    //noinspection unchecked
    return new PageImpl(new LinkedList<>(all.subList(from, to)), pageable, total);
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultUserServiceTest.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultUserServiceTest.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.GroupRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.MembershipRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.UserRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.GroupService;
import com.mmnaseri.utils.spring.data.dsl.factory.RepositoryFactoryBuilder;
import com.mmnaseri.utils.spring.data.dsl.factory.Start;
import org.hamcrest.Matchers;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:11 PM)
*/
public class DefaultUserServiceTest {

  private DefaultUserService service;
  private UserRepository repository;
  private GroupService groupService;
  private GroupRepository groupRepository;

  @BeforeMethod
  public void setUp() {
    // all repositories are mocked from the same builder so the services share one mocked backend
    final Start builder = RepositoryFactoryBuilder.builder();
    groupRepository = builder.mock(GroupRepository.class);
    final MembershipRepository membershipRepository = builder.mock(MembershipRepository.class);
    groupService = new DefaultGroupService(groupRepository, membershipRepository);
    repository = builder.mock(UserRepository.class);
    service = new DefaultUserService(repository, groupService);
  }

  @Test
  public void testCreatingAUser() {
    assertThat(repository.count(), is(0L));
    final String username = "milad";
    final String email = "milad@mmnaseri.com";
    final String password = "123456";
    final User user = service.createUser(username, email, password);
    assertThat(user, is(notNullValue()));
    assertThat(user.getId(), is(notNullValue()));
    assertThat(user.getUsername(), is(username));
    assertThat(user.getEmail(), is(email));
    // the password must not be stored as plain text
    assertThat(user.getPasswordHash(), is(not(password)));
    assertThat(repository.count(), is(1L));
    final User found = repository.findById(user.getId()).orElse(null);
    assertThat(found, is(notNullValue()));
    assertThat(found.getUsername(), is(username));
    assertThat(found.getEmail(), is(email));
    assertThat(found.getPasswordHash(), is(user.getPasswordHash()));
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testCreatingADuplicateUser() {
    // a second user with the same username (even with a different email) must be rejected
    service.createUser("milad", "email1", "123456");
    service.createUser("milad", "email2", "123456");
  }

  @Test
  public void testLookingUpAUserByEmail() {
    // the email lookup is expected to be case-insensitive (mixed-case query below)
    final String id = service.createUser("milad", "milad@domain.com", "123456").getId();
    final User found = service.lookup("MILAD@domain.com");
    assertThat(found, is(notNullValue()));
    assertThat(found.getId(), is(id));
  }

  @Test
  public void testLookingUpAUserByUsername() {
    // the username lookup is expected to be case-insensitive as well
    final String id = service.createUser("milad", "milad@domain.com", "123456").getId();
    final User found = service.lookup("MILAD");
    assertThat(found, is(notNullValue()));
    assertThat(found.getId(), is(id));
  }

  @Test
  public void testLookingForNonExistentUser() {
    final User user = service.lookup("milad");
    assertThat(user, is(nullValue()));
  }

  @Test
  public void testAuthenticatingWithUsername() {
    final String id = service.createUser("milad", "milad@domain.com", "123456").getId();
    final User user = service.authenticate("Milad", "123456");
    assertThat(user, is(notNullValue()));
    assertThat(user.getId(), is(id));
  }

  @Test
  public void testAuthenticatingWithEmail() {
    final String id = service.createUser("milad", "milad@domain.com", "123456").getId();
    final User user = service.authenticate("milad@DOMAIN.com", "123456");
    assertThat(user, is(notNullValue()));
    assertThat(user.getId(), is(id));
  }

  @Test
  public void testAuthenticatingWithWrongHandle() {
    // an unknown handle must fail authentication even with the right password
    service.createUser("milad", "milad@domain.com", "123456");
    final User user = service.authenticate("milad@DOMAIN", "123456");
    assertThat(user, is(nullValue()));
  }

  @Test
  public void testAuthenticatingWithWrongPassword() {
    service.createUser("milad", "milad@domain.com", "123456");
    final User user = service.authenticate("milad", "987654");
    assertThat(user, is(nullValue()));
  }

  @Test
  public void testDeletingAUser() {
    service.createUser("milad", "milad@mmaseri.com", "123456");
    assertThat(repository.count(), is(1L));
    service.deleteUser("milad");
    assertThat(repository.count(), is(0L));
  }

  @Test
  public void testChangingUserPassword() {
    // after the change, only the new password should authenticate
    service.createUser("milad", "milad@mmnaseri.com", "123456");
    assertThat(service.authenticate("milad", "123456"), is(notNullValue()));
    service.updatePassword("milad", "123456", "987654");
    assertThat(service.authenticate("milad", "123456"), is(nullValue()));
    assertThat(service.authenticate("milad", "987654"), is(notNullValue()));
  }

  @Test
  public void testDeletingAUserThatIsPartOfMultipleGroups() {
    // deleting a user must remove the user's memberships but leave the groups themselves intact
    final User user = service.createUser("milad", "milad@mmnaseri.com", "123456");
    groupService.join(groupService.createGroup("Group 1"), user);
    groupService.join(groupService.createGroup("Group 2"), user);
    groupService.join(groupService.createGroup("Group 3"), user);
    groupService.join(groupService.createGroup("Group 4"), user);
    assertThat(groupService.groups(user), hasSize(4));
    service.deleteUser(user.getUsername());
    assertThat(groupService.groups(user), is(Matchers.empty()));
    assertThat(groupRepository.count(), is(4L));
  }

  @Test
  public void testReadingDeactivatedUsers() {
    // a group only shows up as "deactivated" for a user after an explicit deactivation
    User user = service.createUser("milad", "milad@mmnaseri.com", "123456");
    Group group = groupService.createGroup("Group 1");
    groupService.join(group, user);
    assertThat(service.deactivatedGroups(user), is(empty()));
    groupService.deactivate(group, user);
    assertThat(service.deactivatedGroups(user), contains(group));
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultPlaneServiceTest.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultPlaneServiceTest.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Plane;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.PlaneRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.utils.RepositoryConfigUtils;
import com.mmnaseri.utils.spring.data.dsl.mock.RepositoryMockBuilder;
import com.mmnaseri.utils.spring.data.proxy.RepositoryFactoryConfiguration;
import com.mmnaseri.utils.spring.data.store.DataStore;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:41 PM)
*/
public class DefaultPlaneServiceTest {

  private PlaneRepository repository;
  private DefaultPlaneService service;

  @BeforeMethod
  public void setUp() {
    // every test reuses the same shared factory configuration (i.e. the same backing store)
    repository =
        new RepositoryMockBuilder()
            .useConfiguration(RepositoryConfigUtils.getConfiguration())
            .mock(PlaneRepository.class);
    service = new DefaultPlaneService(repository);
  }

  @AfterMethod
  public void tearDown() {
    // The shared configuration means the data store registry is shared across all tests — the
    // moral equivalent of one shared database. Wipe the Plane table after each test so the
    // tests stay independent of each other.
    final RepositoryFactoryConfiguration configuration = RepositoryConfigUtils.getConfiguration();
    final DataStore<Object, Plane> dataStore =
        configuration.getDataStoreRegistry().getDataStore(Plane.class);
    dataStore.truncate();
  }

  /** Builds (without saving) the Boeing 747 fixture shared by the lookup tests. */
  private static Plane boeing747() {
    final Plane plane = new Plane();
    plane.setModel("Boeing 747");
    plane.setSerial("123456");
    plane.setCapacity(1000);
    return plane;
  }

  @Test
  public void testCreate() {
    // the store starts out empty; creating a plane should persist exactly that plane
    assertThat(repository.count(), is(0L));
    final String model = "F-22";
    final String serial = "123456";
    final Long id = service.create(model, serial);
    assertThat(id, is(notNullValue()));
    final Plane stored = repository.findById(id).orElse(null);
    assertThat(stored, is(notNullValue()));
    assertThat(stored.getModel(), is(model));
    assertThat(stored.getSerial(), is(serial));
  }

  @Test
  public void testLookupById() {
    final Plane fixture = boeing747();
    final Plane saved = repository.save(fixture);
    // looking up by the generated id should yield the fixture's model name
    final String model = service.lookup(saved.getId());
    assertThat(model, is(notNullValue()));
    assertThat(model, is(fixture.getModel()));
  }

  @Test
  public void testLookupBySerial() {
    final Plane fixture = boeing747();
    repository.save(fixture);
    // looking up by the serial number should yield the fixture's model name
    final String model = service.lookup(fixture.getSerial());
    assertThat(model, is(notNullValue()));
    assertThat(model, is(fixture.getModel()));
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCardServiceTest.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCardServiceTest.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Card;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.CardRepository;
import org.hamcrest.Matchers;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import static com.mmnaseri.utils.spring.data.dsl.factory.RepositoryFactoryBuilder.builder;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
public class DefaultCardServiceTest {

  private CardRepository repository;
  private DefaultCardService service;

  @BeforeMethod
  public void setUp() {
    repository = builder().mock(CardRepository.class);
    service = new DefaultCardService(repository);
  }

  /** Shorthand for building a card with the given blabla value. */
  private static Card card(String blabla) {
    return new Card().setBlabla(blabla);
  }

  @Test
  public void loadAllCardsWhenEmpty() {
    // nothing has been saved, so the service should hand back an empty list
    final List<Card> loaded = service.load();
    assertThat(loaded, is(Matchers.<Card>empty()));
  }

  @Test
  public void loadAllCards() {
    final Card a = card("a");
    final Card b = card("b");
    final Card c = card("c");
    // save out of order on purpose: load() is expected to return them ordered a, b, c
    repository.save(b);
    repository.save(c);
    repository.save(a);
    final List<Card> loaded = service.load();
    assertThat(loaded, Matchers.<Card>hasSize(3));
    assertThat(loaded, contains(a, b, c));
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultGroupServiceTest.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultGroupServiceTest.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Membership;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.GroupRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.MembershipRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.UserRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.GroupService;
import com.mmnaseri.utils.samples.spring.data.jpa.service.UserService;
import com.mmnaseri.utils.spring.data.dsl.factory.RepositoryFactoryBuilder;
import com.mmnaseri.utils.spring.data.dsl.factory.Start;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/30/16, 9:15 AM)
*/
public class DefaultGroupServiceTest {

  private GroupService service;
  private GroupRepository groupRepository;
  private UserService userService;
  private MembershipRepository membershipRepository;

  @BeforeMethod
  public void setUp() {
    // all repositories come from the same builder so the services share one mocked backend
    final Start builder = RepositoryFactoryBuilder.builder();
    groupRepository = builder.mock(GroupRepository.class);
    membershipRepository = builder.mock(MembershipRepository.class);
    final UserRepository userRepository = builder.mock(UserRepository.class);
    service = new DefaultGroupService(groupRepository, membershipRepository);
    userService = new DefaultUserService(userRepository, service);
  }

  /** Persists and returns a group named "My Group" — the fixture most tests start from. */
  private Group persistedGroup() {
    final Group group = new Group();
    group.setName("My Group");
    return groupRepository.save(group);
  }

  /** Persists and returns the default user fixture. */
  private User persistedUser() {
    return userService.createUser("milad", "milad@domain.com", "123456");
  }

  /** Persists a membership record directly, bypassing the service under test. */
  private void persistMembership(Group group, User user) {
    final Membership membership = new Membership();
    membership.setUser(user);
    membership.setGroup(group);
    membershipRepository.save(membership);
  }

  @Test
  public void testCreatingAGroup() {
    assertThat(groupRepository.count(), is(0L));
    final String name = "My Group";
    final Group group = service.createGroup(name);
    assertThat(group, is(notNullValue()));
    assertThat(group.getName(), is(name));
    assertThat(groupRepository.count(), is(1L));
    final Group found = groupRepository.findById(group.getId()).orElse(null);
    assertThat(found, is(notNullValue()));
    assertThat(found.getName(), is(name));
  }

  @Test
  public void testDeletingAnEmptyGroup() {
    final Group group = persistedGroup();
    service.deleteGroup(group);
    assertThat(groupRepository.count(), is(0L));
  }

  @Test
  public void testEstablishingMembership() {
    // joining through the service should persist exactly one membership linking group and user
    final Group group = persistedGroup();
    final User user = persistedUser();
    service.join(group, user);
    assertThat(membershipRepository.count(), is(1L));
    final Membership membership = membershipRepository.findAll().get(0);
    assertThat(membership, is(notNullValue()));
    assertThat(membership.getGroup(), is(notNullValue()));
    assertThat(membership.getGroup().getId(), is(group.getId()));
    assertThat(membership.getUser(), is(notNullValue()));
    assertThat(membership.getUser().getId(), is(user.getId()));
  }

  @Test
  public void testBreakingAMembership() {
    final Group group = persistedGroup();
    final User user = persistedUser();
    persistMembership(group, user);
    service.leave(group, user);
    assertThat(membershipRepository.count(), is(0L));
  }

  @Test
  public void testListingGroupMembers() {
    final Group group = persistedGroup();
    final User user = persistedUser();
    persistMembership(group, user);
    final List<User> users = service.members(group);
    assertThat(users, is(notNullValue()));
    assertThat(users, hasSize(1));
    assertThat(users.get(0), is(notNullValue()));
    assertThat(users.get(0).getId(), is(user.getId()));
  }

  @Test
  public void testListingUserGroups() {
    final Group group = persistedGroup();
    final User user = persistedUser();
    persistMembership(group, user);
    final List<Group> groups = service.groups(user);
    assertThat(groups, is(notNullValue()));
    assertThat(groups, hasSize(1));
    assertThat(groups.get(0), is(notNullValue()));
    assertThat(groups.get(0).getId(), is(group.getId()));
  }

  @Test
  public void testDeletingAGroupWithMembers() {
    // deleting a group must also clean up its membership records
    final Group group = persistedGroup();
    final User user = persistedUser();
    persistMembership(group, user);
    service.deleteGroup(group);
    assertThat(groupRepository.count(), is(0L));
    assertThat(membershipRepository.count(), is(0L));
  }
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCustomerServiceTest.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCustomerServiceTest.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Customer;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.CustomerRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.CustomerRepositoryExampleSupport;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import static com.mmnaseri.utils.spring.data.dsl.factory.RepositoryFactoryBuilder.builder;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/12/16, 2:03 PM)
*/
public class DefaultCustomerServiceTest {
private DefaultCustomerService service;
private CustomerRepository repository;
@BeforeMethod
public void setUp() {
  // Mock the repository with the custom example-executor support implementation mixed in,
  // then build the service under test on top of it.
  repository =
      builder()
          .usingImplementation(CustomerRepositoryExampleSupport.class)
          .mock(CustomerRepository.class);
  service = new DefaultCustomerService(repository);
}
@Test
public void testCustomerRegistration() {
  final Date birthday = date(1988, 0, 1);
  // the database must start out empty
  assertThat(repository.count(), is(0L));
  // register a single customer ...
  final long id = service.register("Milad", "Naseri", birthday);
  // ... which should leave exactly one record behind
  assertThat(repository.count(), is(1L));
  // the customer must be loadable by the ID handed back from registration
  final Customer registered = repository.findById(id).orElse(null);
  assertThat(registered, is(notNullValue()));
  // and every registered attribute must have been persisted as-is
  assertThat(registered.getId(), is(id));
  assertThat(registered.getFirstName(), is("Milad"));
  assertThat(registered.getLastName(), is("Naseri"));
  assertThat(registered.getBirthday(), is(birthday));
}
@Test
public void testLoadingCustomerById() {
  // seed the database with a single customer
  final Customer saved = createCustomer("Milad", "Naseri", date(1988, 1, 1));
  // the service must locate that very record by its ID
  final Customer found = service.findCustomer(saved.getId());
  assertThat(found, is(notNullValue()));
  assertThat(found.getId(), is(saved.getId()));
  assertThat(found.getBirthday(), is(saved.getBirthday()));
  assertThat(found.getFirstName(), is(saved.getFirstName()));
  assertThat(found.getLastName(), is(saved.getLastName()));
}
@Test
public void testLoadingCustomersByBirthday() {
  // two of the three registered customers are born inside [1988/1/1 .. 1989/12/28]
  final Customer milad = createCustomer("Milad", "Naseri", date(1988, 1, 1));
  final Customer zohreh = createCustomer("Zohreh", "Sadeghi", date(1989, 9, 22));
  createCustomer("Hassan", "Naseri", date(1962, 4, 15));
  final List<Customer> matches =
      service.findCustomersByBirthday(date(1988, 1, 1), date(1989, 12, 28));
  // only the two in-range customers should come back, in any order
  assertThat(matches, is(notNullValue()));
  assertThat(matches, hasSize(2));
  assertThat(matches, containsInAnyOrder(milad, zohreh));
}
@Test
public void testLoadingCustomersByFirstNameAndLastName() {
// let's save three customers ...
final Customer customer = createCustomer("Milad", "Naseri", date(1988, 1, 1));
createCustomer("Zohreh", "Sadeghi", date(1989, 9, 22));
createCustomer("Hassan", "Naseri", date(1962, 4, 15));
// ... and have the service look up one of them
final List<Customer> list = service.findCustomersByName("Milad", "Naseri");
assertThat(list, is(notNullValue()));
assertThat(list, hasSize(1));
assertThat(list.get(0), is(customer));
}
@Test
public void testLoadingCustomersByFirstNames() {
createCustomer("Milad", "Naseri", null);
final Customer customer = createCustomer("Mateusz", "Stefek", null);
final List<Customer> list = service.findCustomersByFirstNames(Collections.singleton("Mateusz"));
assertThat(list, is(notNullValue()));
assertThat(list, hasSize(1));
assertThat(list.get(0), is(customer));
}
@Test
public void testLookingUpByFirstNamePart() {
createCustomer("Milad", "Naseri", date(1988, 1, 1));
final Customer eric = createCustomer("Eric", "Deandrea", date(1999, 1, 1));
final List<Customer> list = service.findByFirstNamePart("IC");
assertThat(list, is(notNullValue()));
assertThat(list, hasSize(1));
assertThat(list.get(0), is(eric));
}
private Customer createCustomer(String firstName, String lastName, Date birthday) {
final Customer customer = new Customer();
customer.setFirstName(firstName);
customer.setLastName(lastName);
customer.setBirthday(birthday);
return repository.save(customer);
}
private Date date(int year, int month, int day) {
final Calendar calendar = new GregorianCalendar();
calendar.set(Calendar.YEAR, year);
calendar.set(Calendar.MONTH, month - 1);
calendar.set(Calendar.DATE, day);
calendar.set(Calendar.HOUR, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return calendar.getTime();
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/utils/RepositoryConfigUtils.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/utils/RepositoryConfigUtils.java | package com.mmnaseri.utils.samples.spring.data.jpa.utils;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.SerialAwareEntityRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.SerialAwareEntityRepositorySupport;
import com.mmnaseri.utils.spring.data.domain.impl.key.ConfigurableSequentialLongKeyGenerator;
import com.mmnaseri.utils.spring.data.proxy.RepositoryFactoryConfiguration;
import static com.mmnaseri.utils.spring.data.dsl.factory.RepositoryFactoryBuilder.builder;
/**
* We are setting defaults on the configuration level, which we will be reusing later.
*
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:42 PM)
*/
public final class RepositoryConfigUtils {
private static final RepositoryFactoryConfiguration CONFIGURATION =
builder()
.honoringImplementation(
SerialAwareEntityRepository.class, SerialAwareEntityRepositorySupport.class)
.withDefaultKeyGenerator(new ConfigurableSequentialLongKeyGenerator())
.configure();
private RepositoryConfigUtils() {
throw new UnsupportedOperationException();
}
public static RepositoryFactoryConfiguration getConfiguration() {
return CONFIGURATION;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/SerialAwareEntityRepositorySupport.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/SerialAwareEntityRepositorySupport.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.SerialAwareEntity;
import com.mmnaseri.utils.spring.data.domain.RepositoryAware;
import com.mmnaseri.utils.spring.data.error.DataOperationExecutionException;
import org.springframework.data.domain.Example;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:45 PM)
*/
public class SerialAwareEntityRepositorySupport
implements SerialAwareEntityRepository, RepositoryAware<JpaRepository> {
private JpaRepository repository;
@Override
public SerialAwareEntity lookupBySerial(String serial) {
final SerialAwareEntity probe = new SerialAwareEntity();
probe.setSerial(serial);
final Example<?> example = Example.of(probe);
final List found = repository.findAll(example);
if (found.isEmpty()) {
return null;
}
if (found.size() > 1) {
throw new DataOperationExecutionException("Expected only one instance to be found", null);
}
return (SerialAwareEntity) found.get(0);
}
@Override
public void setRepository(JpaRepository repository) {
this.repository = repository;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/CustomerRepositoryExampleSupport.java | spring-data-mock-sample-jpa/src/test/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/CustomerRepositoryExampleSupport.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Customer;
import com.mmnaseri.utils.spring.data.domain.RepositoryAware;
import org.springframework.data.domain.Example;
import java.util.List;
/**
* @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/12/16, 5:30 PM)
*/
@SuppressWarnings("unused")
public class CustomerRepositoryExampleSupport implements RepositoryAware<CustomerRepository> {
private CustomerRepository repository;
public List<Customer> findByExample(Example<Customer> example) {
return repository.findAll(example);
}
@Override
public void setRepository(CustomerRepository repository) {
this.repository = repository;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/UserService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/UserService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import java.util.List;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:06 PM)
*/
@SuppressWarnings("unused")
public interface UserService {
User createUser(String username, String email, String password);
void updatePassword(String handle, String oldPassword, String newPassword);
void deleteUser(String handle);
User lookup(String handle);
User authenticate(String handle, String password);
List<Group> deactivatedGroups(User user);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/CardService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/CardService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Card;
import java.util.List;
public interface CardService {
List<Card> load();
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/PlaneService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/PlaneService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:39 PM)
*/
@SuppressWarnings("unused")
public interface PlaneService {
Long create(String model, String serial);
String lookup(Long id);
String lookup(String serial);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/GroupService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/GroupService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import java.util.List;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 5:26 PM)
*/
public interface GroupService {
Group createGroup(String name);
void deleteGroup(Group group);
void join(Group group, User user);
void leave(Group group, User user);
List<User> members(Group group);
List<Group> groups(User user);
void deactivate(Group group, User user);
List<Group> deactivatedGroups(User user);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/CustomerService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/CustomerService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Customer;
import java.util.Collection;
import java.util.Date;
import java.util.List;
/**
* @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/12/16, 1:51 PM)
*/
@SuppressWarnings("unused")
public interface CustomerService {
long register(String firstName, String lastName, Date birthday);
Customer findCustomer(long id);
List<Customer> findCustomersByBirthday(Date from, Date to);
List<Customer> findCustomersByName(String firstName, String lastName);
List<Customer> findCustomersByFirstNames(Collection<String> firstNames);
List<Customer> findByFirstNamePart(String part);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultGroupService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultGroupService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Membership;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.GroupRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.MembershipRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.GroupService;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static java.util.stream.Collectors.toList;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/30/16, 9:16 AM)
*/
public class DefaultGroupService implements GroupService {
private final GroupRepository groupRepository;
private final MembershipRepository membershipRepository;
public DefaultGroupService(
GroupRepository groupRepository, MembershipRepository membershipRepository) {
this.groupRepository = groupRepository;
this.membershipRepository = membershipRepository;
}
@Override
public Group createGroup(String name) {
final Group group = new Group();
group.setName(name);
return groupRepository.save(group);
}
@Override
public void deleteGroup(Group group) {
final List<Membership> memberships = membershipRepository.findByGroup(group);
membershipRepository.deleteAll(memberships);
groupRepository.delete(group);
}
@Override
public void join(Group group, User user) {
if (membershipRepository.findByUserAndGroup(user, group).isPresent()) {
return;
}
final Membership membership = new Membership();
membership.setGroup(group);
membership.setUser(user);
membershipRepository.save(membership);
}
@Override
public void leave(Group group, User user) {
final Optional<Membership> membership = membershipRepository.findByUserAndGroup(user, group);
if (!membership.isPresent()) {
return;
}
membershipRepository.delete(membership.get());
}
@Override
public List<User> members(Group group) {
final List<Membership> memberships = membershipRepository.findByGroup(group);
final List<User> users = new ArrayList<>();
for (Membership membership : memberships) {
users.add(membership.getUser());
}
return users;
}
@Override
public List<Group> groups(User user) {
final List<Membership> memberships = membershipRepository.findByUser(user);
final List<Group> groups = new ArrayList<>();
for (Membership membership : memberships) {
groups.add(membership.getGroup());
}
return groups;
}
@Override
public void deactivate(final Group group, final User user) {
Optional<Membership> optional = membershipRepository.findByUserAndGroup(user, group);
if (!optional.isPresent()) {
return;
}
Membership membership = optional.get();
membership.setActive(false);
membershipRepository.save(membership);
}
@Override
public List<Group> deactivatedGroups(final User user) {
return membershipRepository.findAllByUserAndActive(user, false).stream()
.map(Membership::getGroup)
.collect(toList());
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCustomerService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCustomerService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Customer;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.CustomerRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.CustomerService;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.ExampleMatcher;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import static org.springframework.data.domain.ExampleMatcher.GenericPropertyMatchers.ignoreCase;
/**
* @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/12/16, 1:55 PM)
*/
public class DefaultCustomerService implements CustomerService {
private final CustomerRepository repository;
public DefaultCustomerService(CustomerRepository repository) {
this.repository = repository;
}
public long register(String firstName, String lastName, Date birthday) {
final Customer customer = new Customer();
customer.setFirstName(firstName);
customer.setLastName(lastName);
customer.setBirthday(birthday);
return repository.save(customer).getId();
}
public Customer findCustomer(long id) {
return repository.findById(id).orElse(null);
}
public List<Customer> findCustomersByBirthday(Date from, Date to) {
return repository.findByBirthdayBetween(from, to);
}
public List<Customer> findCustomersByName(String firstName, String lastName) {
final Customer probe = new Customer();
probe.setFirstName(firstName);
probe.setLastName(lastName);
final ExampleMatcher matcher =
ExampleMatcher.matching()
.withMatcher("firstName", ignoreCase())
.withMatcher("lastName", ignoreCase());
final Example<Customer> example = Example.of(probe, matcher);
return repository.findByExample(example);
}
public List<Customer> findCustomersByFirstNames(Collection<String> firstNames) {
return repository.findByFirstNameIn(firstNames);
}
@Override
public List<Customer> findByFirstNamePart(final String part) {
return repository.findByFirstNameIgnoreCaseContaining(part);
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultPlaneService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultPlaneService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Plane;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.PlaneRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.PlaneService;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:39 PM)
*/
public class DefaultPlaneService implements PlaneService {
private final PlaneRepository repository;
public DefaultPlaneService(PlaneRepository repository) {
this.repository = repository;
}
@Override
public Long create(String model, String serial) {
final Plane plane = new Plane();
plane.setModel(model);
plane.setCapacity(100);
plane.setSerial(serial);
return repository.save(plane).getId();
}
@Override
public String lookup(Long id) {
return repository.findById(id).map(Plane::getModel).orElse(null);
}
@Override
public String lookup(String serial) {
final Plane plane = repository.lookupBySerial(serial);
if (plane == null) {
return null;
}
return plane.getModel();
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCardService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultCardService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Card;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.CardRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.CardService;
import java.util.List;
public class DefaultCardService implements CardService {
private final CardRepository cardRepository;
public DefaultCardService(final CardRepository cardRepository) {
this.cardRepository = cardRepository;
}
@Override
public List<Card> load() {
return cardRepository.findAllByOrderByBlablaAsc();
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultUserService.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/service/impl/DefaultUserService.java | package com.mmnaseri.utils.samples.spring.data.jpa.service.impl;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import com.mmnaseri.utils.samples.spring.data.jpa.repository.UserRepository;
import com.mmnaseri.utils.samples.spring.data.jpa.service.GroupService;
import com.mmnaseri.utils.samples.spring.data.jpa.service.UserService;
import com.mmnaseri.utils.samples.spring.data.jpa.utils.EncryptionUtils;
import java.util.List;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:09 PM)
*/
public class DefaultUserService implements UserService {
private final UserRepository repository;
private final GroupService groupService;
public DefaultUserService(UserRepository repository, GroupService groupService) {
this.repository = repository;
this.groupService = groupService;
}
@Override
public User createUser(String username, String email, String password) {
if (repository.findByUsernameIgnoreCase(username) != null
|| repository.findByEmailIgnoreCase(email) != null) {
throw new IllegalArgumentException();
}
final User user = new User();
user.setUsername(username);
user.setEmail(email);
user.setPasswordHash(EncryptionUtils.encrypt(password));
return repository.save(user);
}
@Override
public void updatePassword(String handle, String oldPassword, String newPassword) {
final User user = authenticate(handle, oldPassword);
if (user == null) {
throw new IllegalStateException();
}
user.setPasswordHash(EncryptionUtils.encrypt(newPassword));
repository.save(user);
}
@Override
public void deleteUser(String handle) {
final User user = lookup(handle);
if (user == null) {
throw new IllegalStateException();
}
final List<Group> groups = groupService.groups(user);
for (Group group : groups) {
groupService.leave(group, user);
}
repository.delete(user);
}
@Override
public User lookup(String handle) {
return repository.findByUsernameOrEmailAllIgnoreCase(handle, handle);
}
@Override
public User authenticate(String handle, String password) {
final User user = lookup(handle);
if (user == null) {
return null;
}
if (user.getPasswordHash().equals(EncryptionUtils.encrypt(password))) {
return user;
}
return null;
}
@Override
public List<Group> deactivatedGroups(final User user) {
return groupService.deactivatedGroups(user);
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/SerialAwareEntity.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/SerialAwareEntity.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.MappedSuperclass;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:36 PM)
*/
@MappedSuperclass
public class SerialAwareEntity {
private String serial;
public String getSerial() {
return serial;
}
public void setSerial(String serial) {
this.serial = serial;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Group.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Group.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:03 PM)
*/
@SuppressWarnings("unused")
@Entity
public class Group {
@Id private String id;
private String name;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Membership.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Membership.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:04 PM)
*/
@Entity
@SuppressWarnings("unused")
public class Membership {
@Id private String id;
@ManyToOne private User user;
@ManyToOne private Group group;
private Boolean active;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public Group getGroup() {
return group;
}
public void setGroup(Group group) {
this.group = group;
}
public Boolean getActive() {
return active;
}
public Membership setActive(final Boolean active) {
this.active = active;
return this;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Plane.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Plane.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/14/16, 11:34 PM)
*/
@Entity
@SuppressWarnings("unused")
public class Plane extends SerialAwareEntity {
@Id private Long id;
private String model;
private int capacity;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public int getCapacity() {
return capacity;
}
public void setCapacity(int capacity) {
this.capacity = capacity;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Customer.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Customer.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Date;
/**
* @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
* @since 1.0 (6/12/16, 1:50 PM)
*/
@Entity
@SuppressWarnings("unused")
public class Customer {
@Id private Long id;
@Temporal(TemporalType.DATE)
private Date birthday;
private String firstName;
private String lastName;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/User.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/User.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.Entity;
import javax.persistence.Id;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:03 PM)
*/
@Entity
@SuppressWarnings("unused")
public class User {
@Id private String id;
private String username;
private String email;
private String passwordHash;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPasswordHash() {
return passwordHash;
}
public void setPasswordHash(String passwordHash) {
this.passwordHash = passwordHash;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Card.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/model/Card.java | package com.mmnaseri.utils.samples.spring.data.jpa.model;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
public class Card {
@Id @GeneratedValue private Integer id;
private String blabla;
public Integer getId() {
return id;
}
public Card setId(final Integer id) {
this.id = id;
return this;
}
public String getBlabla() {
return blabla;
}
public Card setBlabla(final String blabla) {
this.blabla = blabla;
return this;
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/utils/EncryptionUtils.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/utils/EncryptionUtils.java | package com.mmnaseri.utils.samples.spring.data.jpa.utils;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* @author Milad Naseri (milad.naseri@cdk.com)
* @since 1.0 (6/29/16, 4:10 PM)
*/
public class EncryptionUtils {
public static String encrypt(String text) {
try {
final MessageDigest digest = MessageDigest.getInstance("SHA-1");
return new String(digest.digest(text.getBytes()));
} catch (NoSuchAlgorithmException e) {
return text;
}
}
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/UserRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/UserRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link User} entities keyed by a String id.
 *
 * <p>The finder methods below are derived queries: Spring Data parses the
 * method names to build the queries, and the {@code IgnoreCase} suffixes make
 * the matching case-insensitive.
 *
 * @author Milad Naseri (milad.naseri@cdk.com)
 * @since 1.0 (6/29/16, 4:05 PM)
 */
public interface UserRepository extends JpaRepository<User, String> {

    /** Finds a user matching either username or email, ignoring case on both. */
    User findByUsernameOrEmailAllIgnoreCase(String username, String email);

    /** Finds a user by username, ignoring case. */
    User findByUsernameIgnoreCase(String username);

    /** Finds a user by email, ignoring case. */
    User findByEmailIgnoreCase(String email);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/PlaneRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/PlaneRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Plane;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link Plane} entities (Long id), combined
 * with the {@link SerialAwareEntityRepository} fragment for serial-number
 * lookups.
 *
 * @author Milad Naseri (milad.naseri@cdk.com)
 * @since 1.0 (6/14/16, 11:34 PM)
 */
public interface PlaneRepository
    extends JpaRepository<Plane, Long>, SerialAwareEntityRepository<Plane> {}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/GroupRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/GroupRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link Group} entities keyed by a String id;
 * only the inherited CRUD operations are exposed.
 *
 * @author Milad Naseri (milad.naseri@cdk.com)
 * @since 1.0 (6/29/16, 4:05 PM)
 */
public interface GroupRepository extends JpaRepository<Group, String> {}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/CardRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/CardRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Card;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;

/**
 * Spring Data JPA repository for {@link Card} entities keyed by an Integer id.
 */
public interface CardRepository extends JpaRepository<Card, Integer> {

    /** Returns all cards sorted ascending by their {@code blabla} property (derived query). */
    List<Card> findAllByOrderByBlablaAsc();
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/MembershipRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/MembershipRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Group;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Membership;
import com.mmnaseri.utils.samples.spring.data.jpa.model.User;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;
import java.util.Optional;

/**
 * Spring Data JPA repository for {@link Membership} entities keyed by a String
 * id. All finder methods are derived queries built from the method names.
 *
 * @author Milad Naseri (milad.naseri@cdk.com)
 * @since 1.0 (6/29/16, 4:06 PM)
 */
public interface MembershipRepository extends JpaRepository<Membership, String> {

    /** All memberships held by the given user. */
    List<Membership> findByUser(User user);

    /** All memberships belonging to the given group. */
    List<Membership> findByGroup(Group group);

    /** The membership linking the given user and group, if any. */
    Optional<Membership> findByUserAndGroup(User user, Group group);

    /** Memberships of the given user filtered by their {@code active} flag. */
    List<Membership> findAllByUserAndActive(User user, boolean active);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/CustomerRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/CustomerRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.Customer;
import org.springframework.data.domain.Example;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.Collection;
import java.util.Date;
import java.util.List;

/**
 * Spring Data JPA repository for {@link Customer} entities keyed by a Long id.
 *
 * @author Mohammad Milad Naseri (m.m.naseri@gmail.com)
 * @since 1.0 (6/12/16, 1:53 PM)
 */
public interface CustomerRepository extends JpaRepository<Customer, Long> {

    /** Customers whose birthday lies in the given range (derived Between query). */
    List<Customer> findByBirthdayBetween(Date from, Date to);

    /** Query-by-example lookup; not a derived query — backed by a custom implementation. */
    @SuppressWarnings("SpringDataMethodInconsistencyInspection") // This is covered in a custom impl.
    List<Customer> findByExample(Example<Customer> probe);

    /** Customers whose first name is any of the given values. */
    List<Customer> findByFirstNameIn(Collection<String> firstNames);

    /** Customers whose first name contains the given substring, ignoring case. */
    List<Customer> findByFirstNameIgnoreCaseContaining(String substring);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
mmnaseri/spring-data-mock | https://github.com/mmnaseri/spring-data-mock/blob/e8547729050c9454872a1038c23bb9b1288c8654/spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/SerialAwareEntityRepository.java | spring-data-mock-sample-jpa/src/main/java/com/mmnaseri/utils/samples/spring/data/jpa/repository/SerialAwareEntityRepository.java | package com.mmnaseri.utils.samples.spring.data.jpa.repository;
import com.mmnaseri.utils.samples.spring.data.jpa.model.SerialAwareEntity;

/**
 * Mix-in repository fragment for entities that expose a serial number.
 *
 * @author Milad Naseri (milad.naseri@cdk.com)
 * @since 1.0 (6/14/16, 11:37 PM)
 */
public interface SerialAwareEntityRepository<E extends SerialAwareEntity> {

    /**
     * Looks up the entity with the given serial.
     * NOTE(review): {@code lookupBy} is not a Spring Data derived-query prefix,
     * so this is presumably backed by a custom implementation — confirm.
     */
    E lookupBySerial(String serial);
}
| java | MIT | e8547729050c9454872a1038c23bb9b1288c8654 | 2026-01-05T02:41:15.897176Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/service/src/main/java/com/aayushatharva/brotli4j/service/BrotliNativeProvider.java | service/src/main/java/com/aayushatharva/brotli4j/service/BrotliNativeProvider.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.service;
/**
 * A service interface that signals that an implementor jar contains a native lib.
 *
 * <p>NOTE(review): implementations look intended for discovery via
 * {@code java.util.ServiceLoader} in a JPMS context — confirm against the
 * module descriptors of the per-platform native artifacts.
 */
public interface BrotliNativeProvider {

    /**
     * Gives the name of the platform that this provider contains a native brotli lib for
     *
     * @return The name of the native, e.g. linux-x86_64 or osx-aarch64
     */
    String platformName();
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/windows-x86_64/src/main/java/com/aayushatharva/brotli4j/windows/x86_64/NativeLoader.java | natives/windows-x86_64/src/main/java/com/aayushatharva/brotli4j/windows/x86_64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.windows.x86_64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Windows x86_64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "windows-x86_64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/linux-aarch64/src/main/java/com/aayushatharva/brotli4j/linux/aarch64/NativeLoader.java | natives/linux-aarch64/src/main/java/com/aayushatharva/brotli4j/linux/aarch64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.linux.aarch64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Linux aarch64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "linux-aarch64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/osx-x86_64/src/main/java/com/aayushatharva/brotli4j/macos/x86_64/NativeLoader.java | natives/osx-x86_64/src/main/java/com/aayushatharva/brotli4j/macos/x86_64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.macos.x86_64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the macOS x86_64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "osx-x86_64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/linux-x86_64/src/main/java/com/aayushatharva/brotli4j/linux/x86_64/NativeLoader.java | natives/linux-x86_64/src/main/java/com/aayushatharva/brotli4j/linux/x86_64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.linux.x86_64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Linux x86_64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "linux-x86_64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/osx-aarch64/src/main/java/com/aayushatharva/brotli4j/macos/aarch64/NativeLoader.java | natives/osx-aarch64/src/main/java/com/aayushatharva/brotli4j/macos/aarch64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.macos.aarch64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the macOS aarch64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "osx-aarch64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/linux-riscv64/src/main/java/com/aayushatharva/brotli4j/linux/riscv64/NativeLoader.java | natives/linux-riscv64/src/main/java/com/aayushatharva/brotli4j/linux/riscv64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.linux.riscv64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Linux riscv64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "linux-riscv64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/windows-aarch64/src/main/java/com/aayushatharva/brotli4j/windows/aarch64/NativeLoader.java | natives/windows-aarch64/src/main/java/com/aayushatharva/brotli4j/windows/aarch64/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.windows.aarch64;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Windows aarch64
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "windows-aarch64";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/linux-armv7/src/main/java/com/aayushatharva/brotli4j/linux/armv7/NativeLoader.java | natives/linux-armv7/src/main/java/com/aayushatharva/brotli4j/linux/armv7/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.linux.armv7;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Linux armv7
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "linux-armv7";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/linux-s390x/src/main/java/com/aayushatharva/brotli4j/linux/s390x/NativeLoader.java | natives/linux-s390x/src/main/java/com/aayushatharva/brotli4j/linux/s390x/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.linux.s390x;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Linux s390x
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "linux-s390x";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/natives/linux-ppc64le/src/main/java/com/aayushatharva/brotli4j/linux/ppc64le/NativeLoader.java | natives/linux-ppc64le/src/main/java/com/aayushatharva/brotli4j/linux/ppc64le/NativeLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.linux.ppc64le;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
/**
 * {@link BrotliNativeProvider} implementation advertising the Linux ppc64le
 * native Brotli library bundled in this artifact (JPMS service provider).
 */
public class NativeLoader implements BrotliNativeProvider {

    /** Platform identifier matching this artifact's bundled native library. */
    private static final String PLATFORM = "linux-ppc64le";

    @Override
    public String platformName() {
        return PLATFORM;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/FileCompressAndDecompressTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/FileCompressAndDecompressTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j;
import com.aayushatharva.brotli4j.decoder.Decoders;
import com.aayushatharva.brotli4j.encoder.Encoder;
import com.aayushatharva.brotli4j.encoder.Encoders;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Random;
import java.util.stream.Collectors;
class FileCompressAndDecompressTest {

    @BeforeAll
    static void load() {
        // Fail fast if the native Brotli library cannot be loaded.
        Brotli4jLoader.ensureAvailability();
    }

    /**
     * Round-trips a large text payload (resource amplified 1536x) through the
     * Brotli encoder/decoder and verifies the decompressed bytes match the input.
     *
     * @throws IOException if the resource cannot be read or coding fails
     */
    @Test
    void bigFileCompressionAndDecompressionTest() throws IOException {
        String fileName = "sample_data.txt";
        try (InputStream in = getClass().getClassLoader().getResourceAsStream(fileName)) {
            assert in != null;
            // NOTE(review): the reader and getBytes() below use the platform
            // default charset; the round-trip is still self-consistent, but an
            // explicit charset would make the fixture bytes deterministic.
            String result = new BufferedReader(new InputStreamReader(in))
                    .lines()
                    .collect(Collectors.joining("\n"));

            // Amplify the data 1536 times
            int amplification = 1536;
            StringBuilder sb = new StringBuilder(result.length() * amplification);
            for (int i = 0; i < amplification; i++) {
                sb.append(result);
            }

            byte[] data = sb.toString().getBytes();
            ByteBuf originalData = Unpooled.wrappedBuffer(data);
            ByteBuf compressedData = PooledByteBufAllocator.DEFAULT.directBuffer();
            ByteBuf uncompressedResultData = PooledByteBufAllocator.DEFAULT.directBuffer(data.length);
            try {
                Encoders.compress(originalData, compressedData);
                Decoders.decompress(compressedData, uncompressedResultData);
                Assertions.assertArrayEquals(data, ByteBufUtil.getBytes(uncompressedResultData));
            } finally {
                // Fix: release the buffers; the original test leaked the pooled
                // direct buffers. Guard on refCnt in case a codec call already
                // released one of them.
                if (originalData.refCnt() > 0) {
                    originalData.release();
                }
                if (compressedData.refCnt() > 0) {
                    compressedData.release();
                }
                if (uncompressedResultData.refCnt() > 0) {
                    uncompressedResultData.release();
                }
            }
        }
    }

    /**
     * Random bytes are incompressible, so the Brotli output must be larger
     * than the input.
     *
     * @throws IOException if encoding fails
     */
    @Test
    void randomCharactersTest() throws IOException {
        Random random = new Random();
        byte[] chars = new byte[1024 * 100];
        random.nextBytes(chars); // Random bytes cannot be compressed
        byte[] compressed = Encoder.compress(chars, new Encoder.Parameters().setQuality(4));
        Assertions.assertTrue(chars.length < compressed.length);
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/Brotli4jLoaderTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/Brotli4jLoaderTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
class Brotli4jLoaderTest {

    /**
     * Verifies that the native Brotli library loads on this platform: loading
     * throws nothing, reports available, and records no unavailability cause.
     */
    @Test
    void load() {
        assertDoesNotThrow(Brotli4jLoader::ensureAvailability);
        assertTrue(Brotli4jLoader.isAvailable());
        assertNull(Brotli4jLoader.getUnavailabilityCause());
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/encoder/EncoderTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/encoder/EncoderTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.Brotli4jLoader;
import com.aayushatharva.brotli4j.common.BrotliCommon;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
class EncoderTest {

    /** Expected Brotli encoding of the ASCII string "Meow" at default settings. */
    private static final byte[] compressedData = new byte[]{-117, 1, -128, 77, 101, 111, 119, 3};

    @BeforeAll
    static void load() {
        // Fail fast if the native Brotli library cannot be loaded.
        Brotli4jLoader.ensureAvailability();
    }

    /** Default-parameter compression must produce the known byte sequence. */
    @Test
    void compress() throws IOException {
        assertArrayEquals(compressedData, Encoder.compress("Meow".getBytes()));
    }

    /** Quality 6 yields the same bytes as the defaults for this tiny input. */
    @Test
    void compressWithQuality() throws IOException {
        assertArrayEquals(compressedData, Encoder.compress("Meow".getBytes(), Encoder.Parameters.create(6)));
    }

    /** The ByteBuf-based compression path must match the byte[] path. */
    @Test
    void compressWithQualityAndByteBuffer() throws IOException {
        ByteBuf src = Unpooled.wrappedBuffer("Meow".getBytes());
        ByteBuf dst = Unpooled.directBuffer();
        Encoders.compress(src, dst);
        assertArrayEquals(compressedData, ByteBufUtil.getBytes(dst));
    }

    /** Exercises each encoder mode and pins the expected output size per mode. */
    @Test
    void compressWithModes() throws IOException {
        final byte[] text = "Some long text, very long text".getBytes();
        final Encoder.Parameters parameters = new Encoder.Parameters();
        final byte[] compressedGeneric = Encoder.compress(text, parameters.setMode(Encoder.Mode.GENERIC));
        assertEquals(34, compressedGeneric.length);
        final byte[] compressedText = Encoder.compress(text, parameters.setMode(Encoder.Mode.TEXT));
        assertEquals(34, compressedText.length);
        final byte[] compressedFont = Encoder.compress(text, parameters.setMode(Encoder.Mode.FONT));
        assertEquals(31, compressedFont.length);
    }

    /** Mode.of must round-trip each enum constant's ordinal. */
    @Test
    void encodeModeEnumValues() {
        assertEquals(Encoder.Mode.FONT, Encoder.Mode.of(Encoder.Mode.FONT.ordinal()));
        assertEquals(Encoder.Mode.TEXT, Encoder.Mode.of(Encoder.Mode.TEXT.ordinal()));
        assertEquals(Encoder.Mode.GENERIC, Encoder.Mode.of(Encoder.Mode.GENERIC.ordinal()));
    }

    /**
     * Regression test: a prepared dictionary's native data must survive garbage
     * collection while the dictionary object is still attached to an encoder.
     */
    @Test
    void ensureDictionaryDataRemainsAfterGC() throws IOException, InterruptedException {
        // We hard code the compressed data, since the dictionary could also be collected just before our first compression
        final byte[] expectedCompression = new byte[]{27, 43, 0, -8, 37, 0, -62, -104, -40, -63, 0};
        final String dictionaryData = "This is some data to be used as a dictionary";
        final byte[] rawBytes = dictionaryData.getBytes(); // Use dictionary also as data to keep it small
        final PreparedDictionary dic = Encoder.prepareDictionary(BrotliCommon.makeNative(dictionaryData.getBytes()), 0);
        // Create gc pressure to trigger potential collection of dictionary data
        ArrayList<Integer> hashes = new ArrayList<>();
        for (int i = 0; i < 1_000_000; i++) {
            String obj = String.valueOf(Math.random());
            hashes.add(obj.hashCode());
        }
        hashes = null; // drop the reference so the GC below can reclaim the garbage
        System.gc();
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
             BrotliOutputStream brotliOutputStream = new BrotliOutputStream(byteArrayOutputStream)) {
            brotliOutputStream.attachDictionary(dic);
            brotliOutputStream.write(rawBytes);
            // Close explicitly so the final Brotli block is flushed before reading the result.
            brotliOutputStream.close();
            byteArrayOutputStream.close();
            assertArrayEquals(expectedCompression, byteArrayOutputStream.toByteArray()); // Otherwise the GC already cleared the data
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/encoder/BrotliOutputStreamTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/encoder/BrotliOutputStreamTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.Brotli4jLoader;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
class BrotliOutputStreamTest {

    /** Expected Brotli encoding of the ASCII string "Meow" at default settings. */
    private static final byte[] compressedData = new byte[]{-117, 1, -128, 77, 101, 111, 119, 3};

    @BeforeAll
    static void load() {
        // Fail fast if the native Brotli library cannot be loaded.
        Brotli4jLoader.ensureAvailability();
    }

    /**
     * Writes "Meow" through a BrotliOutputStream and compares the compressed
     * bytes against the known fixture. The streams are closed explicitly
     * before {@code toByteArray()} so the encoder flushes its final block;
     * try-with-resources alone would close them only after the assertion.
     */
    @Test
    void compress() throws IOException {
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
             BrotliOutputStream brotliOutputStream = new BrotliOutputStream(byteArrayOutputStream)) {
            brotliOutputStream.write("Meow".getBytes());
            brotliOutputStream.close();
            byteArrayOutputStream.close();
            assertArrayEquals(compressedData, byteArrayOutputStream.toByteArray());
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/decoder/BrotliDecoderChannelTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/decoder/BrotliDecoderChannelTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.Brotli4jLoader;
import com.aayushatharva.brotli4j.encoder.BrotliOutputStream;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.StandardCharsets;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Tests {@link BrotliDecoderChannel} against two kinds of sources: one that
 * hands over the whole compressed payload in a single read, and one that
 * trickles it a single byte at a time (exercising the decoder's refill path).
 */
public class BrotliDecoderChannelTest {

    /** Brotli-compressed representation of the string "Meow". */
    private static final byte[] compressedData = new byte[]{-117, 1, -128, 77, 101, 111, 119, 3};

    @BeforeAll
    static void load() {
        Brotli4jLoader.ensureAvailability();
    }

    @Test
    public void decompress() throws IOException {
        // try-with-resources: the channel must be closed so decoder state is released
        // (the original test never closed it — resource leak).
        try (BrotliDecoderChannel channel = new BrotliDecoderChannel(
                new EntireBufferByteChannel(ByteBuffer.wrap(compressedData)))) {
            assertEquals("Meow", readAsUtf8(channel));
        }
    }

    @Test
    public void decompressLonger() throws IOException {
        String data = "In ancient times cats were worshipped as gods; they have not forgotten this.";
        ByteBuffer compressed = compressUtf8(data);
        // Quick verification that it compressed as expected
        assertEquals(59, compressed.remaining());
        try (BrotliDecoderChannel channel = new BrotliDecoderChannel(
                new EntireBufferByteChannel(compressed))) {
            assertEquals(data, readAsUtf8(channel));
        }
    }

    @Test
    public void decompressOneByteAtATime() throws IOException {
        try (BrotliDecoderChannel channel = new BrotliDecoderChannel(
                new OneByteAtATimeByteChannel(ByteBuffer.wrap(compressedData)))) {
            assertEquals("Meow", readAsUtf8(channel));
        }
    }

    @Test
    public void decompressOneByteAtATimeLonger() throws IOException {
        String data = "In ancient times cats were worshipped as gods; they have not forgotten this.";
        ByteBuffer compressed = compressUtf8(data);
        // Quick verification that it compressed as expected
        assertEquals(59, compressed.remaining());
        try (BrotliDecoderChannel channel = new BrotliDecoderChannel(
                new OneByteAtATimeByteChannel(compressed))) {
            assertEquals(data, readAsUtf8(channel));
        }
    }

    /** Compresses {@code data} as UTF-8 with default Brotli parameters. */
    private static ByteBuffer compressUtf8(String data) throws IOException {
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
             BrotliOutputStream output = new BrotliOutputStream(baos)) {
            output.write(data.getBytes(StandardCharsets.UTF_8));
            output.close(); // flush the final Brotli block before snapshotting the bytes
            return ByteBuffer.wrap(baos.toByteArray());
        }
    }

    /** Performs a single read into a 2 KiB buffer and decodes the result as UTF-8. */
    private static String readAsUtf8(BrotliDecoderChannel channel) throws IOException {
        ByteBuffer output = ByteBuffer.allocate(2048);
        channel.read(output);
        output.flip();
        return StandardCharsets.UTF_8.decode(output).toString();
    }

    /** Serves the whole backing buffer on the first read, then signals EOF. */
    private static class EntireBufferByteChannel implements ReadableByteChannel {
        private final ByteBuffer buffer;

        public EntireBufferByteChannel(ByteBuffer buffer) {
            this.buffer = buffer.slice();
        }

        @Override
        public int read(ByteBuffer dst) {
            if (!buffer.hasRemaining())
                return -1;
            int pos = dst.position();
            dst.put(buffer);
            return dst.position() - pos;
        }

        @Override
        public boolean isOpen() {
            return true;
        }

        @Override
        public void close() {
        }
    }

    /** Serves exactly one byte per read, forcing the decoder to refill repeatedly. */
    private static class OneByteAtATimeByteChannel implements ReadableByteChannel {
        private final ByteBuffer buffer;

        public OneByteAtATimeByteChannel(ByteBuffer buffer) {
            this.buffer = buffer.slice();
        }

        @Override
        public boolean isOpen() {
            return true;
        }

        @Override
        public void close() {
        }

        @Override
        public int read(ByteBuffer dst) {
            if (!buffer.hasRemaining())
                return -1;
            dst.put(buffer.get());
            return 1;
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/decoder/DecoderTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/decoder/DecoderTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.Brotli4jLoader;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Verifies that {@link Decoder} and {@link Decoders} decompress a small known
 * Brotli payload back to its original text.
 */
class DecoderTest {
    /** Brotli-compressed representation of the string "Meow". */
    private static final byte[] compressedData = new byte[]{-117, 1, -128, 77, 101, 111, 119, 3};

    @BeforeAll
    static void load() {
        Brotli4jLoader.ensureAvailability();
    }

    @Test
    void decompress() throws IOException {
        DirectDecompress directDecompress = Decoder.decompress(compressedData);
        assertEquals(DecoderJNI.Status.DONE, directDecompress.getResultStatus());
        // Decode with an explicit charset: new String(byte[]) depends on the
        // platform default charset and can fail on non-UTF-8 machines.
        assertEquals("Meow", new String(directDecompress.getDecompressedData(), "UTF-8"));
    }

    @Test
    void decompressWithByteBuffer() throws IOException {
        ByteBuf src = Unpooled.wrappedBuffer(compressedData);
        ByteBuf dst = Unpooled.directBuffer();
        DirectDecompress directDecompress = Decoders.decompress(src, dst);
        assertEquals(DecoderJNI.Status.DONE, directDecompress.getResultStatus());
        assertEquals("Meow", new String(directDecompress.getDecompressedData(), "UTF-8"));
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/test/java/com/aayushatharva/brotli4j/decoder/BrotliInputStreamTest.java | brotli4j/src/test/java/com/aayushatharva/brotli4j/decoder/BrotliInputStreamTest.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.Brotli4jLoader;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Verifies that {@link BrotliInputStream} decompresses a small known payload
 * byte by byte.
 */
class BrotliInputStreamTest {
    /** Brotli-compressed representation of the string "Meow". */
    private static final byte[] compressedData = new byte[]{-117, 1, -128, 77, 101, 111, 119, 3};

    @BeforeAll
    static void load() {
        Brotli4jLoader.ensureAvailability();
    }

    @Test
    void simpleDecompression() throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // try-with-resources closes the streams even when a read or the assertion
        // fails (the original closed them manually and leaked on any exception).
        try (ByteArrayInputStream bais = new ByteArrayInputStream(compressedData);
             BrotliInputStream brotliInputStream = new BrotliInputStream(bais)) {
            int read;
            while ((read = brotliInputStream.read()) > -1) { // -1 means EOF
                baos.write(read);
            }
        }
        // Explicit charset: the no-arg toString() uses the platform default charset.
        assertEquals("Meow", baos.toString("UTF-8"));
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/Brotli4jLoader.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/Brotli4jLoader.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j;
import com.aayushatharva.brotli4j.common.annotations.Local;
import com.aayushatharva.brotli4j.service.BrotliNativeProvider;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ServiceLoader;
/**
* Loads Brotli Native Library
*/
@Local
public class Brotli4jLoader {

    // Cause of the load failure, or null when the native library loaded successfully.
    private static final Throwable UNAVAILABILITY_CAUSE;

    static {
        Throwable cause = null;
        // 1) Explicit override: load exactly the file the user pointed at.
        String customPath = System.getProperty("brotli4j.library.path");
        if (customPath != null) {
            try {
                System.load(customPath);
            } catch (Throwable throwable) {
                cause = throwable;
            }
        } else {
            // 2) Normal java.library.path lookup.
            try {
                System.loadLibrary("brotli");
            } catch (Throwable t) {
                // 3) Fall back to extracting the bundled native library from the
                //    classpath into a unique temp directory and loading it from there.
                try {
                    String nativeLibName = System.mapLibraryName("brotli");
                    String platform = getPlatform();
                    String libPath = "/lib/" + platform + '/' + nativeLibName;
                    // System.nanoTime() makes the directory name unique per JVM run.
                    File tempDir = new File(System.getProperty("java.io.tmpdir"), "com_aayushatharva_brotli4j_" + System.nanoTime());
                    tempDir.mkdir();
                    tempDir.deleteOnExit();
                    File tempFile = new File(tempDir, nativeLibName);
                    Class<?> loaderClassToUse = Brotli4jLoader.class; // Use this as a fallback for non-JPMS contexts
                    // In Java9+ with JPMS enabled, we need a class in the jar that contains the file to be able to access its content
                    ServiceLoader<BrotliNativeProvider> nativeProviders = ServiceLoader.load(BrotliNativeProvider.class, Brotli4jLoader.class.getClassLoader());
                    for (BrotliNativeProvider nativeProvider : nativeProviders) {
                        if (nativeProvider.platformName().equals(platform)) {
                            loaderClassToUse = nativeProvider.getClass();
                            break;
                        }
                    }
                    // Copy the native library to a temporary file and load it
                    try (InputStream in = loaderClassToUse.getResourceAsStream(libPath)) {
                        // If the library is not found, throw an exception.
                        if (in == null) {
                            throw new UnsatisfiedLinkError("Failed to find Brotli native library in classpath: " + libPath);
                        }
                        Files.copy(in, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                        System.load(tempFile.getAbsolutePath());
                    } finally {
                        tempFile.deleteOnExit();
                    }
                } catch (Throwable throwable) {
                    cause = throwable;
                }
            }
        }
        UNAVAILABILITY_CAUSE = cause;
    }

    /**
     * @return {@code true} if the Brotli native library is available else {@code false}.
     */
    public static boolean isAvailable() {
        return UNAVAILABILITY_CAUSE == null;
    }

    /**
     * Ensure Brotli native library is available.
     *
     * @throws UnsatisfiedLinkError If unavailable.
     */
    public static void ensureAvailability() {
        if (UNAVAILABILITY_CAUSE != null) {
            UnsatisfiedLinkError error = new UnsatisfiedLinkError("Failed to load Brotli native library");
            error.initCause(UNAVAILABILITY_CAUSE);
            throw error;
        }
    }

    /**
     * @return the {@link Throwable} that prevented the native library from loading,
     *         or {@code null} if it loaded successfully.
     */
    public static Throwable getUnavailabilityCause() {
        return UNAVAILABILITY_CAUSE;
    }

    /**
     * Maps {@code os.name}/{@code os.arch} to the platform classifier used in the
     * bundled native-library resource path (e.g. {@code linux-x86_64}).
     *
     * @throws UnsupportedOperationException for any OS/architecture combination
     *                                       that has no bundled native library
     */
    private static String getPlatform() {
        String osName = System.getProperty("os.name");
        String archName = System.getProperty("os.arch");
        if ("Linux".equalsIgnoreCase(osName)) {
            if ("amd64".equalsIgnoreCase(archName)) {
                return "linux-x86_64";
            } else if ("aarch64".equalsIgnoreCase(archName)) {
                return "linux-aarch64";
            } else if ("arm".equalsIgnoreCase(archName)) {
                return "linux-armv7";
            } else if ("s390x".equalsIgnoreCase(archName)) {
                return "linux-s390x";
            } else if ("ppc64le".equalsIgnoreCase(archName)) {
                return "linux-ppc64le";
            } else if ("riscv64".equalsIgnoreCase(archName)) {
                return "linux-riscv64";
            }
        } else if (osName.startsWith("Windows")) {
            if ("amd64".equalsIgnoreCase(archName)) {
                return "windows-x86_64";
            } else if ("aarch64".equalsIgnoreCase(archName)) {
                return "windows-aarch64";
            }
        } else if (osName.startsWith("Mac")) {
            if ("x86_64".equalsIgnoreCase(archName)) {
                return "osx-x86_64";
            } else if ("aarch64".equalsIgnoreCase(archName)) {
                return "osx-aarch64";
            }
        }
        throw new UnsupportedOperationException("Unsupported OS and Architecture: " + osName + ", " + archName);
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/BrotliNativeProvider.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/BrotliNativeProvider.java | package com.aayushatharva.brotli4j;
import com.aayushatharva.brotli4j.common.annotations.Internal;
/**
* @deprecated This interface is NO-OP now. It is superseded by {@link com.aayushatharva.brotli4j.service.BrotliNativeProvider}.
* However, we cannot remove this interface because it is part of the public API.
* <p>
* Also, this is an Internal API and should not be used by external users.
*/
@Deprecated
@Internal
public interface BrotliNativeProvider {
    /**
     * Do not use this method. It is superseded by
     * {@link com.aayushatharva.brotli4j.service.BrotliNativeProvider#platformName()}.
     *
     * <p>Kept only for backward compatibility; it will be removed in the future.
     *
     * @return never returns normally
     * @throws UnsupportedOperationException always
     */
    @Deprecated
    default String platformName() {
        throw new UnsupportedOperationException("This method is superseded by com.aayushatharva.brotli4j.service.BrotliNativeProvider#platformName()");
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/Encoders.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/Encoders.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Local;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.buffer.UnpooledDirectByteBuf;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Multiple encoding methods using Netty Buffer
* Make sure to add it as dependency before using this class
*
* @see <a href="https://search.maven.org/artifact/io.netty/netty-buffer/">Netty Buffer</a>
*/
@Local
public final class Encoders {
    /**
     * Encodes the given {@link ByteBuf} into a freshly allocated direct buffer.
     *
     * @param src {@link ByteBuf} source
     * @param pooled If set to {@code true} then this method will return
     *               {@code PooledDirectByteBuf} else {@link UnpooledDirectByteBuf}
     * @return the compressed data; the caller owns the returned buffer and must release it
     * @throws IOException Thrown in case of error during encoding
     */
    public static ByteBuf compress(ByteBuf src, boolean pooled) throws IOException {
        ByteBuf dst = pooled ? PooledByteBufAllocator.DEFAULT.directBuffer() : Unpooled.directBuffer();
        try {
            compress(src, dst);
        } catch (Throwable t) {
            // Release the freshly allocated buffer on failure — otherwise it leaks,
            // because the caller never gets a reference to it.
            dst.release();
            throw t;
        }
        return dst;
    }

    /**
     * Encodes the given {@link ByteBuf} with default parameters.
     *
     * @param src {@link ByteBuf} source
     * @param dst {@link ByteBuf} destination
     * @throws IOException Thrown in case of error during encoding
     */
    public static void compress(ByteBuf src, ByteBuf dst) throws IOException {
        compress(src, dst, Encoder.Parameters.DEFAULT);
    }

    /**
     * Encodes the given {@link ByteBuffer} with default parameters.
     *
     * @param src {@link ByteBuffer} source
     * @param dst {@link ByteBuffer} destination; must have room for the compressed output
     * @throws IOException Thrown in case of error during encoding
     */
    public static void compress(ByteBuffer src, ByteBuffer dst) throws IOException {
        // Delegate instead of duplicating the temp-buffer dance below.
        compress(src, dst, Encoder.Parameters.DEFAULT);
    }

    /**
     * Encodes the given {@link ByteBuffer}.
     *
     * @param src {@link ByteBuffer} source
     * @param dst {@link ByteBuffer} destination; must have room for the compressed output
     * @param params {@link Encoder.Parameters} instance
     * @throws IOException Thrown in case of error during encoding
     */
    public static void compress(ByteBuffer src, ByteBuffer dst, Encoder.Parameters params) throws IOException {
        ByteBuf srcBuf = PooledByteBufAllocator.DEFAULT.directBuffer();
        ByteBuf dstBuf = PooledByteBufAllocator.DEFAULT.directBuffer();
        try {
            srcBuf.writeBytes(src);
            compress(srcBuf, dstBuf, params);
            // Copy the result only after a successful encode. Doing this in the
            // finally block (as before) published partial output on failure and
            // could mask the original exception with a BufferOverflowException.
            dst.put(dstBuf.nioBuffer());
        } finally {
            srcBuf.release();
            dstBuf.release();
        }
    }

    /**
     * Encodes the given {@link ByteBuf}.
     *
     * @param src {@link ByteBuf} source
     * @param dst {@link ByteBuf} destination
     * @param params {@link Encoder.Parameters} instance
     * @throws IOException Thrown in case of error during encoding
     */
    public static void compress(ByteBuf src, ByteBuf dst, Encoder.Parameters params) throws IOException {
        int readableBytes = src.readableBytes();
        if (readableBytes == 0) {
            // Single byte 0x06 is the canonical Brotli stream for empty input.
            dst.writeByte((byte) 6);
            return;
        }
        EncoderJNI.Wrapper encoder = new EncoderJNI.Wrapper(readableBytes, params.quality(), params.lgwin(), params.mode());
        try {
            encoder.getInputBuffer().put(src.nioBuffer());
            encoder.push(EncoderJNI.Operation.PROCESS, readableBytes);
            // Drain output / finish loop: pull while output is pending, then FINISH,
            // and stop once the encoder reports completion.
            while (true) {
                if (!encoder.isSuccess()) {
                    throw new IOException("encoding failed");
                } else if (encoder.hasMoreOutput()) {
                    ByteBuffer buffer = encoder.pull();
                    dst.writeBytes(buffer);
                } else if (!encoder.isFinished()) {
                    encoder.push(EncoderJNI.Operation.FINISH, 0);
                } else {
                    break;
                }
            }
        } finally {
            // Always free the native encoder state.
            encoder.destroy();
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/EncoderJNI.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/EncoderJNI.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* JNI wrapper for brotli encoder.
*/
@Upstream
public class EncoderJNI {

    // All native calls share a 5-slot long[] "context". Slot 0 is non-zero while the
    // native encoder is alive (destroy() zeroes it); the meaning of slots 1..4
    // changes over the Wrapper lifecycle — see comments in Wrapper's constructor.
    private static native ByteBuffer nativeCreate(long[] context);
    private static native void nativePush(long[] context, int length);
    private static native ByteBuffer nativePull(long[] context);
    private static native void nativeDestroy(long[] context);
    private static native boolean nativeAttachDictionary(long[] context, ByteBuffer dictionary);
    private static native ByteBuffer nativePrepareDictionary(ByteBuffer dictionary, long type);
    private static native void nativeDestroyDictionary(ByteBuffer dictionary);

    /** Operations that can be handed to the encoder via {@link Wrapper#push}. */
    public enum Operation {
        PROCESS,
        FLUSH,
        FINISH
    }

    /** Prepared dictionary backed by native memory; freed in {@link #finalize()}. */
    private static class PreparedDictionaryImpl implements PreparedDictionary {
        private ByteBuffer data;
        /** Reference to (non-copied) LZ data. */
        private ByteBuffer rawData;

        private PreparedDictionaryImpl(ByteBuffer data, ByteBuffer rawData) {
            this.data = data;
            this.rawData = rawData;
        }

        @Override
        public ByteBuffer getData() {
            return data;
        }

        @Override
        protected void finalize() throws Throwable {
            try {
                // Null the fields first so a resurrected instance cannot double-free.
                ByteBuffer data = this.data;
                this.data = null;
                this.rawData = null;
                nativeDestroyDictionary(data);
            } finally {
                super.finalize();
            }
        }
    }

    /**
     * Prepares raw or serialized dictionary for being used by encoder.
     *
     * @param dictionary raw / serialized dictionary data; MUST be direct
     * @param sharedDictionaryType dictionary data type
     */
    static PreparedDictionary prepareDictionary(ByteBuffer dictionary, int sharedDictionaryType) {
        if (!dictionary.isDirect()) {
            throw new IllegalArgumentException("only direct buffers allowed");
        }
        ByteBuffer dictionaryData = nativePrepareDictionary(dictionary, sharedDictionaryType);
        if (dictionaryData == null) {
            throw new IllegalStateException("OOM");
        }
        return new PreparedDictionaryImpl(dictionaryData, dictionary);
    }

    /** Java-side handle for one native encoder instance. Not thread-safe. */
    public static class Wrapper {
        protected final long[] context = new long[5];
        private final ByteBuffer inputBuffer;
        // True until the first push/pull; dictionaries may only be attached while fresh.
        private boolean fresh = true;

        /**
         * Creates a native encoder.
         *
         * @param inputBufferSize size of the direct input buffer; must be positive
         * @param quality Brotli quality level
         * @param lgwin log2 of the LZ window size
         * @param mode encoding mode, or {@code null} for the native default (-1)
         * @throws IOException if the native encoder cannot be initialized
         */
        public Wrapper(int inputBufferSize, int quality, int lgwin, Encoder.Mode mode)
                throws IOException {
            if (inputBufferSize <= 0) {
                throw new IOException("buffer size must be positive");
            }
            // Slots 1..4 carry the construction parameters into nativeCreate().
            this.context[1] = inputBufferSize;
            this.context[2] = quality;
            this.context[3] = lgwin;
            this.context[4] = mode != null ? mode.ordinal() : -1;
            this.inputBuffer = nativeCreate(this.context);
            if (this.context[0] == 0) {
                throw new IOException("failed to initialize native brotli encoder");
            }
            // From here on the same slots are reused as status flags, read by the
            // accessors below: [1] success, [2] more output, [3] remaining input,
            // [4] finished.
            this.context[1] = 1;
            this.context[2] = 0;
            this.context[3] = 0;
            this.context[4] = 0;
        }

        /** Attaches a prepared dictionary; only allowed before any push/pull. */
        public boolean attachDictionary(ByteBuffer dictionary) {
            if (!dictionary.isDirect()) {
                throw new IllegalArgumentException("only direct buffers allowed");
            }
            if (context[0] == 0) {
                // NOTE(review): message says "decoder" — upstream copy/paste; the
                // string is kept as-is to preserve behavior.
                throw new IllegalStateException("brotli decoder is already destroyed");
            }
            if (!fresh) {
                throw new IllegalStateException("decoding is already started");
            }
            return nativeAttachDictionary(context, dictionary);
        }

        /**
         * Hands {@code length} bytes of {@link #getInputBuffer()} to the encoder and
         * performs {@code op}. Slot 1 doubles as the operation code on the way in.
         */
        public void push(Operation op, int length) {
            if (length < 0) {
                throw new IllegalArgumentException("negative block length");
            }
            if (context[0] == 0) {
                throw new IllegalStateException("brotli encoder is already destroyed");
            }
            if (!isSuccess() || hasMoreOutput()) {
                throw new IllegalStateException("pushing input to encoder in unexpected state");
            }
            if (hasRemainingInput() && length != 0) {
                throw new IllegalStateException("pushing input to encoder over previous input");
            }
            context[1] = op.ordinal();
            fresh = false;
            nativePush(context, length);
        }

        /** @return {@code true} if the last native call succeeded. */
        public boolean isSuccess() {
            return context[1] != 0;
        }

        /** @return {@code true} if compressed output is ready to be pulled. */
        public boolean hasMoreOutput() {
            return context[2] != 0;
        }

        /** @return {@code true} if previously pushed input has not been fully consumed. */
        public boolean hasRemainingInput() {
            return context[3] != 0;
        }

        /** @return {@code true} once the encoder has emitted its final block. */
        public boolean isFinished() {
            return context[4] != 0;
        }

        /** @return the direct buffer that input must be written into before {@link #push}. */
        public ByteBuffer getInputBuffer() {
            return inputBuffer;
        }

        /** Pulls the next chunk of compressed output; valid only while {@link #hasMoreOutput()}. */
        public ByteBuffer pull() {
            if (context[0] == 0) {
                throw new IllegalStateException("brotli encoder is already destroyed");
            }
            if (!isSuccess() || !hasMoreOutput()) {
                throw new IllegalStateException("pulling while data is not ready");
            }
            fresh = false;
            return nativePull(context);
        }

        /**
         * Releases native resources.
         */
        public void destroy() {
            if (context[0] == 0) {
                throw new IllegalStateException("brotli encoder is already destroyed");
            }
            nativeDestroy(context);
            context[0] = 0;
        }

        @Override
        protected void finalize() throws Throwable {
            if (context[0] != 0) {
                /* TODO(eustas): log resource leak? */
                destroy();
            }
            super.finalize();
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/BrotliOutputStream.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/BrotliOutputStream.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channels;
/**
* Output stream that wraps native brotli encoder.
*/
@Upstream
public class BrotliOutputStream extends OutputStream {
    /**
     * The default internal buffer size used by the encoder.
     */
    private static final int DEFAULT_BUFFER_SIZE = 16384;
    private final Encoder encoder;

    /**
     * Creates a BrotliOutputStream.
     *
     * @param destination underlying destination
     * @param params encoding settings
     * @param bufferSize intermediate buffer size
     * @throws IOException If any failure during initialization
     */
    public BrotliOutputStream(OutputStream destination, Encoder.Parameters params, int bufferSize)
            throws IOException {
        this.encoder = new Encoder(Channels.newChannel(destination), params, bufferSize);
    }

    /**
     * Creates a BrotliOutputStream.
     *
     * @param destination underlying destination
     * @param params encoding settings
     * @throws IOException If any failure during initialization
     */
    public BrotliOutputStream(OutputStream destination, Encoder.Parameters params)
            throws IOException {
        this(destination, params, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Creates a BrotliOutputStream with default parameters.
     *
     * @param destination underlying destination
     * @throws IOException If any failure during initialization
     */
    public BrotliOutputStream(OutputStream destination) throws IOException {
        this(destination, new Encoder.Parameters());
    }

    /**
     * Attaches a prepared dictionary to the underlying encoder.
     *
     * @param dictionary dictionary to use for compression
     * @throws IOException propagated from the encoder
     */
    public void attachDictionary(PreparedDictionary dictionary) throws IOException {
        encoder.attachDictionary(dictionary);
    }

    /** Closes the stream by delegating to the encoder's close. */
    @Override
    public void close() throws IOException {
        encoder.close();
    }

    /** Delegates flush to the encoder; rejects flushing after close. */
    @Override
    public void flush() throws IOException {
        if (encoder.closed) {
            throw new IOException("write after close");
        }
        encoder.flush();
    }

    /** Writes a single byte, spinning until the encoder accepts input. */
    @Override
    public void write(int b) throws IOException {
        if (encoder.closed) {
            throw new IOException("write after close");
        }
        // Busy-wait: presumably encode(PROCESS) returns false while output is still
        // pending — TODO(review): confirm against Encoder.encode's contract.
        while (!encoder.encode(EncoderJNI.Operation.PROCESS)) {
            Thread.yield();
        }
        encoder.inputBuffer.put((byte) b);
    }

    /** Writes the whole array; equivalent to {@code write(b, 0, b.length)}. */
    @Override
    public void write(byte[] b) throws IOException {
        this.write(b, 0, b.length);
    }

    /** Writes {@code len} bytes from {@code b} starting at {@code off}. */
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        if (encoder.closed) {
            throw new IOException("write after close");
        }
        while (len > 0) {
            if (!encoder.encode(EncoderJNI.Operation.PROCESS)) {
                Thread.yield();
                continue;
            }
            int limit;
            // Copy as much as currently fits into the encoder's input buffer.
            while ((limit = Math.min(len, encoder.inputBuffer.remaining())) == 0) {
                Thread.yield();
            }
            encoder.inputBuffer.put(b, off, limit);
            off += limit;
            len -= limit;
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/PreparedDictionaryGenerator.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/PreparedDictionaryGenerator.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
/**
* Java prepared (raw) dictionary producer.
*/
@Upstream
public class PreparedDictionaryGenerator {
    // Header magic of the serialized "prepared dictionary" layout built below.
    private static final int MAGIC = 0xDEBCEDE0;
    private static final long HASH_MULTIPLIER = 0x1fe35a7bd3579bd3L;

    /** Immutable holder for the flat direct buffer produced by {@link #generate}. */
    private static class PreparedDictionaryImpl implements PreparedDictionary {
        private final ByteBuffer data;

        private PreparedDictionaryImpl(ByteBuffer data) {
            this.data = data;
        }

        @Override
        public ByteBuffer getData() {
            return data;
        }
    }

    // Disallow instantiation.
    private PreparedDictionaryGenerator() {
    }

    /** Generates a prepared dictionary from {@code src} with default hasher parameters. */
    public static PreparedDictionary generate(ByteBuffer src) {
        return generate(src, 17, 3, 40, 5);
    }

    /**
     * Generates a prepared (raw) dictionary from {@code src}.
     *
     * @param src dictionary bytes; the buffer's whole capacity is used
     * @param bucketBits log2 of the number of hash buckets (must be &lt; 24)
     * @param slotBits log2 of the number of slots ({@code bucketBits - slotBits} must be &lt; 16)
     * @param hashBits number of low hash bits that are significant
     * @param blockBits log2 of the per-bucket chain-length limit (must be &le; 12)
     */
    public static PreparedDictionary generate(ByteBuffer src,
            int bucketBits, int slotBits, int hashBits, int blockBits) {
        ((Buffer) src).clear(); // Just in case...
        if (blockBits > 12) {
            throw new IllegalArgumentException("blockBits is too big");
        }
        if (bucketBits >= 24) {
            throw new IllegalArgumentException("bucketBits is too big");
        }
        if (bucketBits - slotBits >= 16) {
            throw new IllegalArgumentException("slotBits is too small");
        }
        int bucketLimit = 1 << blockBits;
        int numBuckets = 1 << bucketBits;
        int numSlots = 1 << slotBits;
        int slotMask = numSlots - 1;
        int hashShift = 64 - bucketBits;
        long hashMask = (~0L) >>> (64 - hashBits);
        int sourceSize = src.capacity();
        if (sourceSize < 8) {
            throw new IllegalArgumentException("src is too short");
        }
        /* Step 1: create "bloated" hasher. */
        short[] num = new short[numBuckets];
        int[] bucketHeads = new int[numBuckets];
        int[] nextBucket = new int[sourceSize];
        // Rolling 64-bit window over 8 consecutive source bytes (little-endian order).
        long accumulator = 0;
        for (int i = 0; i < 7; ++i) {
            accumulator |= (src.get(i) & 0xFFL) << (8 * i);
        }
        accumulator <<= 8;
        /* TODO(eustas): apply custom "store" order. */
        for (int i = 0; i + 7 < sourceSize; ++i) {
            accumulator = (accumulator >>> 8) | ((src.get(i + 7) & 0xFFL) << 56);
            long h = (accumulator & hashMask) * HASH_MULTIPLIER;
            int key = (int) (h >>> hashShift);
            int count = num[key];
            // Prepend position i to the bucket's chain; -1 terminates the chain.
            nextBucket[i] = (count == 0) ? -1 : bucketHeads[key];
            bucketHeads[key] = i;
            count++;
            if (count > bucketLimit) {
                count = bucketLimit;
            }
            num[key] = (short) count;
        }
        /* Step 2: find slot limits. */
        int[] slotLimit = new int[numSlots];
        int[] slotSize = new int[numSlots];
        int totalItems = 0;
        for (int i = 0; i < numSlots; ++i) {
            boolean overflow = false;
            slotLimit[i] = bucketLimit;
            while (true) {
                overflow = false;
                int limit = slotLimit[i];
                int count = 0;
                for (int j = i; j < numBuckets; j += numSlots) {
                    int size = num[j];
                    /* Last chain may span behind 64K limit; overflow happens only if
                       we are about to use 0xFFFF+ as item offset. */
                    if (count >= 0xFFFF) {
                        overflow = true;
                        break;
                    }
                    if (size > limit) {
                        size = limit;
                    }
                    count += size;
                }
                if (!overflow) {
                    slotSize[i] = count;
                    totalItems += count;
                    break;
                }
                // Shrink this slot's per-bucket limit until its items fit in 16 bits.
                slotLimit[i]--;
            }
        }
        /* Step 3: transfer data to "slim" hasher. */
        // Flat layout: 6-int header | slot offsets | bucket heads | items | source copy.
        int part0 = 6 * 4;
        int part1 = numSlots * 4;
        int part2 = numBuckets * 2;
        int part3 = totalItems * 4;
        int allocSize = part0 + part1 + part2 + part3 + sourceSize;
        ByteBuffer flat = ByteBuffer.allocateDirect(allocSize);
        ByteBuffer pointer = flat.slice();
        pointer.order(ByteOrder.nativeOrder());
        IntBuffer struct = pointer.asIntBuffer();
        pointer.position(pointer.position() + part0);
        IntBuffer slotOffsets = pointer.asIntBuffer();
        pointer.position(pointer.position() + part1);
        ShortBuffer heads = pointer.asShortBuffer();
        pointer.position(pointer.position() + part2);
        IntBuffer items = pointer.asIntBuffer();
        pointer.position(pointer.position() + part3);
        ByteBuffer sourceCopy = pointer.slice();
        /* magic */
        struct.put(0, MAGIC);
        /* source_offset */
        struct.put(1, totalItems);
        /* source_size */
        struct.put(2, sourceSize);
        /* hash_bits */
        struct.put(3, hashBits);
        /* bucket_bits */
        struct.put(4, bucketBits);
        /* slot_bits */
        struct.put(5, slotBits);
        totalItems = 0;
        for (int i = 0; i < numSlots; ++i) {
            slotOffsets.put(i, totalItems);
            totalItems += slotSize[i];
            slotSize[i] = 0;
        }
        for (int i = 0; i < numBuckets; ++i) {
            int slot = i & slotMask;
            int count = num[i];
            if (count > slotLimit[slot]) {
                count = slotLimit[slot];
            }
            if (count == 0) {
                // 0xFFFF marks an empty bucket.
                heads.put(i, (short) 0xFFFF);
                continue;
            }
            int cursor = slotSize[slot];
            heads.put(i, (short) cursor);
            cursor += slotOffsets.get(slot);
            slotSize[slot] += count;
            int pos = bucketHeads[i];
            for (int j = 0; j < count; j++) {
                items.put(cursor++, pos);
                pos = nextBucket[pos];
            }
            cursor--;
            // High bit flags the last item of a bucket's chain.
            items.put(cursor, items.get(cursor) | 0x80000000);
        }
        sourceCopy.put(src);
        return new PreparedDictionaryImpl(flat);
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/BrotliEncoderChannel.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/BrotliEncoderChannel.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.WritableByteChannel;
/**
 * A {@link WritableByteChannel} facade over the native brotli encoder: bytes
 * written to this channel are compressed and forwarded to the wrapped
 * destination channel.
 */
@Upstream
public class BrotliEncoderChannel extends Encoder implements WritableByteChannel {

    /**
     * Default size of the internal input buffer.
     */
    private static final int DEFAULT_BUFFER_SIZE = 16384;

    /** Guards channel state; all public operations synchronize on it. */
    private final Object mutex = new Object();

    /**
     * Creates a BrotliEncoderChannel
     *
     * @param destination underlying destination
     * @param params      encoding settings
     * @param bufferSize  intermediate buffer size
     * @throws IOException If any failure during initialization
     */
    public BrotliEncoderChannel(WritableByteChannel destination, Encoder.Parameters params,
                                int bufferSize) throws IOException {
        super(destination, params, bufferSize);
    }

    /**
     * Creates a BrotliEncoderChannel with the default buffer size.
     *
     * @param destination underlying destination
     * @param params      encoding settings
     * @throws IOException If any failure during initialization
     */
    public BrotliEncoderChannel(WritableByteChannel destination, Encoder.Parameters params)
            throws IOException {
        this(destination, params, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Creates a BrotliEncoderChannel with default parameters and buffer size.
     *
     * @param destination underlying destination
     * @throws IOException If any failure during initialization
     */
    public BrotliEncoderChannel(WritableByteChannel destination) throws IOException {
        this(destination, new Encoder.Parameters());
    }

    @Override
    public void attachDictionary(PreparedDictionary dictionary) throws IOException {
        super.attachDictionary(dictionary);
    }

    @Override
    public boolean isOpen() {
        synchronized (mutex) {
            return !closed;
        }
    }

    @Override
    public void close() throws IOException {
        synchronized (mutex) {
            super.close();
        }
    }

    @Override
    public int write(ByteBuffer src) throws IOException {
        synchronized (mutex) {
            if (closed) {
                throw new ClosedChannelException();
            }
            int written = 0;
            // Keep feeding the encoder while input remains and the native
            // side can accept more data (encode() reports free input space).
            while (src.hasRemaining() && encode(EncoderJNI.Operation.PROCESS)) {
                int chunk = Math.min(src.remaining(), inputBuffer.remaining());
                ByteBuffer window = src.slice();
                ((Buffer) window).limit(chunk);
                inputBuffer.put(window);
                ((Buffer) src).position(src.position() + chunk);
                written += chunk;
            }
            return written;
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/PreparedDictionary.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/PreparedDictionary.java | /* Copyright 2018 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.nio.ByteBuffer;
/**
 * Prepared dictionary data provider.
 */
@Upstream
public interface PreparedDictionary {
    /**
     * Returns the serialized prepared-dictionary data.
     * <p>
     * NOTE(review): implementations appear to be handed to native code
     * (see {@code Encoder#attachDictionary}), which presumably requires a
     * direct buffer — confirm against implementations.
     *
     * @return dictionary data buffer
     */
    ByteBuffer getData();
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/Encoder.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/encoder/Encoder.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.encoder;
import com.aayushatharva.brotli4j.common.annotations.Local;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for OutputStream / Channel implementations.
 * <p>
 * Wraps the native brotli encoder: callers fill {@link #inputBuffer} and then
 * drive the encoder via {@link #encode(EncoderJNI.Operation)}, which pushes
 * input to native code and drains compressed output to {@code destination}.
 */
@Upstream
@Local
public class Encoder {
    /** Sink that receives the compressed output. */
    private final WritableByteChannel destination;
    /** Keeps attached dictionaries reachable while the native encoder may still use them. */
    private final List<PreparedDictionary> dictionaries;
    /** Native encoder wrapper. */
    private final EncoderJNI.Wrapper encoder;
    /** Compressed output pulled from the native encoder; null once fully written out. */
    private ByteBuffer buffer;
    /** Direct buffer shared with native code; callers put uncompressed input here. */
    final ByteBuffer inputBuffer;
    /** Set once {@link #close()} has run. */
    boolean closed;

    /**
     * Creates a Encoder wrapper.
     *
     * @param destination underlying destination
     * @param params encoding parameters
     * @param inputBufferSize read buffer size
     * @throws IOException If any failure during initialization
     */
    public Encoder(WritableByteChannel destination, Parameters params, int inputBufferSize) throws IOException {
        if (inputBufferSize <= 0) {
            throw new IllegalArgumentException("buffer size must be positive");
        }
        if (destination == null) {
            throw new NullPointerException("destination can not be null");
        }
        this.dictionaries = new ArrayList<>();
        this.destination = destination;
        this.encoder = new EncoderJNI.Wrapper(inputBufferSize, params.quality, params.lgwin, params.mode);
        this.inputBuffer = this.encoder.getInputBuffer();
    }

    /**
     * Encodes the given data buffer.
     *
     * @param data byte array to be compressed
     * @param offset index of the first byte to compress
     * @param length number of bytes to compress
     * @param params {@link Parameters} instance
     * @return compressed byte array
     * @throws IOException If any failure during encoding
     */
    @Upstream
    public static byte[] compress(byte[] data, int offset, int length, Parameters params) throws IOException {
        if (length == 0) {
            // Canned one-byte stream returned as the compressed form of empty input.
            byte[] empty = new byte[1];
            empty[0] = 6;
            return empty;
        }
        /* data.length > 0 */
        EncoderJNI.Wrapper encoder = new EncoderJNI.Wrapper(length, params.quality, params.lgwin, params.mode);
        ArrayList<byte[]> output = new ArrayList<>();
        int totalOutputSize = 0;
        try {
            encoder.getInputBuffer().put(data, offset, length);
            encoder.push(EncoderJNI.Operation.FINISH, length);
            // Drain the encoder until it reports completion, collecting output chunks.
            while (true) {
                if (!encoder.isSuccess()) {
                    throw new IOException("encoding failed");
                } else if (encoder.hasMoreOutput()) {
                    ByteBuffer buffer = encoder.pull();
                    byte[] chunk = new byte[buffer.remaining()];
                    buffer.get(chunk);
                    output.add(chunk);
                    totalOutputSize += chunk.length;
                } else if (!encoder.isFinished()) {
                    encoder.push(EncoderJNI.Operation.FINISH, 0);
                } else {
                    break;
                }
            }
        } finally {
            encoder.destroy();
        }
        if (output.size() == 1) {
            return output.get(0);
        }
        // Concatenate all pulled chunks into one result array.
        byte[] result = new byte[totalOutputSize];
        int resultOffset = 0;
        for (byte[] chunk : output) {
            System.arraycopy(chunk, 0, result, resultOffset, chunk.length);
            resultOffset += chunk.length;
        }
        return result;
    }

    /**
     * Encodes the given data buffer with {@link Parameters#DEFAULT default parameters}.
     *
     * @param data byte array to be compressed
     * @return compressed byte array
     * @throws IOException If any failure during encoding
     */
    @Local
    public static byte[] compress(byte[] data) throws IOException {
        return compress(data, Parameters.DEFAULT);
    }

    /**
     * Encodes the given data buffer.
     *
     * @param data byte array to be compressed
     * @param params {@link Parameters} instance
     * @return compressed byte array
     * @throws IOException If any failure during encoding
     */
    @Upstream
    public static byte[] compress(byte[] data, Parameters params) throws IOException {
        return compress(data, 0, data.length, params);
    }

    /**
     * Encodes the given data buffer range with default parameters.
     *
     * @param data byte array to be compressed
     * @param offset index of the first byte to compress
     * @param length number of bytes to compress
     * @return compressed byte array
     * @throws IOException If any failure during encoding
     */
    @Upstream
    public static byte[] compress(byte[] data, int offset, int length) throws IOException {
        return compress(data, offset, length, new Parameters());
    }

    /**
     * Prepares raw or serialized dictionary for being used by encoder.
     *
     * @param dictionary raw / serialized dictionary data; MUST be direct
     * @param sharedDictionaryType dictionary data type
     * @return {@link PreparedDictionary} instance
     */
    @Upstream
    public static PreparedDictionary prepareDictionary(ByteBuffer dictionary, int sharedDictionaryType) {
        return EncoderJNI.prepareDictionary(dictionary, sharedDictionaryType);
    }

    /**
     * Closes the encoder (best-effort) and fails with the given message.
     */
    @Upstream
    private void fail(String message) throws IOException {
        try {
            close();
        } catch (IOException ex) {
            /* Ignore */
        }
        throw new IOException(message);
    }

    /**
     * Attaches a prepared dictionary to the native encoder.
     *
     * @param dictionary dictionary to attach
     * @throws IOException if the native side rejects the dictionary
     */
    @Upstream
    public void attachDictionary(PreparedDictionary dictionary) throws IOException {
        if (!encoder.attachDictionary(dictionary.getData())) {
            fail("failed to attach dictionary");
        }
        // Reference to native prepared dictionary wrapper should be held till the end of encoding.
        dictionaries.add(dictionary);
    }

    /**
     * Writes pending compressed output to {@code destination}.
     *
     * @param force repeat pushing until all output is consumed
     * @return true if all encoder output is consumed
     */
    @Upstream
    boolean pushOutput(boolean force) throws IOException {
        while (buffer != null) {
            if (buffer.hasRemaining()) {
                destination.write(buffer);
            }
            if (!buffer.hasRemaining()) {
                buffer = null;
            } else if (!force) {
                // Destination did not take everything; let the caller retry later.
                return false;
            }
        }
        return true;
    }

    /**
     * Drives the native encoder state machine for the given operation.
     *
     * @return true if there is space in inputBuffer.
     */
    @Local
    @Upstream
    public boolean encode(EncoderJNI.Operation op) throws IOException {
        boolean force = (op != EncoderJNI.Operation.PROCESS);
        if (force) {
            // FLUSH/FINISH: expose everything written so far to the encoder.
            ((Buffer) inputBuffer).limit(inputBuffer.position());
        } else if (inputBuffer.hasRemaining()) {
            // PROCESS with free input space: nothing to do yet.
            return true;
        }
        boolean hasInput = true;
        while (true) {
            if (!encoder.isSuccess()) {
                fail("encoding failed");
            } else if (!pushOutput(force)) {
                return false;
            } else if (encoder.hasMoreOutput()) {
                buffer = encoder.pull();
            } else if (encoder.hasRemainingInput()) {
                encoder.push(op, 0);
            } else if (hasInput) {
                // Hand the buffered input to the native encoder exactly once.
                encoder.push(op, inputBuffer.limit());
                hasInput = false;
            } else {
                ((Buffer) inputBuffer).clear();
                return true;
            }
        }
    }

    /**
     * Flushes buffered input through the encoder to the destination.
     */
    @Local
    @Upstream
    public void flush() throws IOException {
        encode(EncoderJNI.Operation.FLUSH);
    }

    /**
     * Finishes the stream, destroys the native encoder and closes the destination.
     * Idempotent.
     */
    @Upstream
    void close() throws IOException {
        if (closed) {
            return;
        }
        closed = true;
        try {
            encode(EncoderJNI.Operation.FINISH);
        } finally {
            encoder.destroy();
            destination.close();
        }
    }

    /**
     * <a href="https://www.brotli.org/encode.html#aa6f">...</a>
     * See encode.h, typedef enum BrotliEncoderMode
     * <p>
     * <strong>Important</strong>: The ordinal value of the
     * modes should be the same as the constant values in encode.h
     */
    public enum Mode {
        /**
         * Default compression mode.
         * In this mode compressor does not know anything in advance about the properties of the input.
         */
        GENERIC,
        /**
         * Compression mode for UTF-8 formatted text input.
         */
        TEXT,
        /**
         * Compression mode used in WOFF 2.0.
         */
        FONT;

        // see: https://www.gamlor.info/wordpress/2017/08/javas-enum-values-hidden-allocations/
        private static final Mode[] ALL_VALUES = values();

        public static Mode of(int value) {
            return ALL_VALUES[value];
        }
    }

    /**
     * Brotli encoder settings.
     */
    @Upstream
    @Local
    public static final class Parameters {
        @Local
        public static final Parameters DEFAULT = new Parameters();
        // -1 means "use the native default" for both quality and lgwin.
        private int quality = -1;
        private int lgwin = -1;
        private Mode mode;

        public Parameters() {
        }

        /**
         * @param quality compression quality, or -1 for default
         * @return a new {@code Parameters} instance
         */
        public static Parameters create(int quality) {
            return create(quality, -1);
        }

        /**
         * @param quality compression quality, or -1 for default
         * @param lgwin log2(LZ window size), or -1 for default
         * @return a new {@code Parameters} instance
         */
        public static Parameters create(int quality, int lgwin) {
            return create(quality, lgwin, null);
        }

        /**
         * @param quality compression quality, or -1 for default
         * @param lgwin log2(LZ window size), or -1 for default
         * @param mode compression mode, or {@code null} for default
         * @return a new {@code Parameters} instance
         */
        public static Parameters create(int quality, int lgwin, Mode mode) {
            return new Parameters()
                    .setQuality(quality)
                    .setWindow(lgwin)
                    .setMode(mode);
        }

        /**
         * @param quality compression quality, or -1 for default
         * @return this instance
         */
        public Parameters setQuality(int quality) {
            if (quality < -1 || quality > 11) {
                throw new IllegalArgumentException("quality should be in range [0, 11], or -1");
            }
            this.quality = quality;
            return this;
        }

        /**
         * @param lgwin log2(LZ window size), or -1 for default
         * @return this instance
         */
        public Parameters setWindow(int lgwin) {
            if ((lgwin != -1) && ((lgwin < 10) || (lgwin > 24))) {
                throw new IllegalArgumentException("lgwin should be in range [10, 24], or -1");
            }
            this.lgwin = lgwin;
            return this;
        }

        /**
         * @param mode compression mode, or {@code null} for default
         * @return this instance
         */
        public Parameters setMode(Mode mode) {
            this.mode = mode;
            return this;
        }

        public int quality() {
            return quality;
        }

        public int lgwin() {
            return lgwin;
        }

        public Mode mode() {
            return mode;
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/common/Utils.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/common/Utils.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.common;
/**
 * Utilities class for Brotli4j
 */
public final class Utils {

    private Utils() {
        // Prevent outside initialization
    }

    /**
     * Returns the maximum compressed size for the given input size.
     * <p></p>
     * This method is based on the original implementation of the Brotli library and works only
     * for direct compression, not stream compression. This is useful to allocating buffers for compressed data.
     *
     * @param input_size The input size.
     * @return The maximum compressed size, or 0 if the bound does not fit in an {@code int}.
     * @throws IllegalArgumentException If the input size is negative.
     */
    public static int maxCompressedSize(int input_size) {
        if (input_size < 0) {
            throw new IllegalArgumentException("Input size cannot be negative");
        }
        if (input_size == 0) {
            return 2;
        }
        /* [window bits / empty metadata] + N * [uncompressed] + [last empty] */
        int largeBlocks = input_size >> 14;
        int overhead = 2 + 4 * largeBlocks + 3 + 1;
        int bound = input_size + overhead;
        // If the addition wrapped around, the bound is not representable: signal with 0.
        return bound < input_size ? 0 : bound;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/common/CommonJNI.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/common/CommonJNI.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.common;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.nio.ByteBuffer;
/**
 * JNI wrapper for brotli common.
 */
@Upstream
class CommonJNI {
    /**
     * Hands the shared brotli dictionary bytes to native code.
     * <p>
     * Callers (see {@code BrotliCommon}) pass a direct buffer of exactly
     * the RFC dictionary size.
     *
     * @param data dictionary data
     * @return {@code true} on success, {@code false} otherwise
     */
    static native boolean nativeSetDictionaryData(ByteBuffer data);
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/common/BrotliCommon.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/common/BrotliCommon.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.common;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
/**
 * JNI wrapper for brotli common.
 * <p>
 * Holds the process-wide RFC brotli dictionary. The dictionary can be set at
 * most once; subsequent set attempts are silently ignored.
 */
@Upstream
public class BrotliCommon {
    /** Exact size, in bytes, of the RFC brotli dictionary. */
    public static final int RFC_DICTIONARY_SIZE = 122784;

    /* 96cecd2ee7a666d5aa3627d74735b32a */
    private static final byte[] RFC_DICTIONARY_MD5 = {
        -106, -50, -51, 46, -25, -90, 102, -43, -86, 54, 39, -41, 71, 53, -77, 42
    };

    /* 72b41051cb61a9281ba3c4414c289da50d9a7640 */
    private static final byte[] RFC_DICTIONARY_SHA_1 = {
        114, -76, 16, 81, -53, 97, -87, 40, 27, -93, -60, 65, 76, 40, -99, -91, 13, -102, 118, 64
    };

    /* 20e42eb1b511c21806d4d227d07e5dd06877d8ce7b3a817f378f313653f35c70 */
    private static final byte[] RFC_DICTIONARY_SHA_256 = {
        32, -28, 46, -79, -75, 17, -62, 24, 6, -44, -46, 39, -48, 126, 93, -48,
        104, 119, -40, -50, 123, 58, -127, 127, 55, -113, 49, 54, 83, -13, 92, 112
    };

    // True once the dictionary has been handed to native code; guarded by mutex.
    private static boolean isDictionaryDataSet;
    private static final Object mutex = new Object();

    /**
     * Checks if the given checksum matches MD5 checksum of the RFC dictionary.
     *
     * @param digest digest byte array
     * @return {@code true} if check was successful else {@code false}
     */
    public static boolean checkDictionaryDataMd5(byte[] digest) {
        return Arrays.equals(RFC_DICTIONARY_MD5, digest);
    }

    /**
     * Checks if the given checksum matches SHA-1 checksum of the RFC dictionary.
     *
     * @param digest digest byte array
     * @return {@code true} if check was successful else {@code false}
     */
    public static boolean checkDictionaryDataSha1(byte[] digest) {
        return Arrays.equals(RFC_DICTIONARY_SHA_1, digest);
    }

    /**
     * Checks if the given checksum matches SHA-256 checksum of the RFC dictionary.
     *
     * @param digest digest byte array
     * @return {@code true} if check was successful else {@code false}
     */
    public static boolean checkDictionaryDataSha256(byte[] digest) {
        return Arrays.equals(RFC_DICTIONARY_SHA_256, digest);
    }

    /**
     * Copy bytes to a new direct ByteBuffer.
     * <p>
     * Direct byte buffers are used to supply native code with large data chunks.
     *
     * @param data byte array of data
     * @return {@link ByteBuffer} instance
     */
    public static ByteBuffer makeNative(byte[] data) {
        ByteBuffer result = ByteBuffer.allocateDirect(data.length);
        result.put(data);
        return result;
    }

    /**
     * Copies data and sets it to be brotli dictionary.
     * No-op if the dictionary was already set.
     *
     * @param data byte array of data
     */
    public static void setDictionaryData(byte[] data) {
        if (data.length != RFC_DICTIONARY_SIZE) {
            throw new IllegalArgumentException("invalid dictionary size");
        }
        synchronized (mutex) {
            if (isDictionaryDataSet) {
                return;
            }
            setDictionaryData(makeNative(data));
        }
    }

    /**
     * Reads data and sets it to be brotli dictionary.
     * No-op if the dictionary was already set.
     *
     * @param src {@link InputStream} of dictionary data
     * @throws IOException In case of error during processing dictionary
     */
    public static void setDictionaryData(InputStream src) throws IOException {
        synchronized (mutex) {
            if (isDictionaryDataSet) {
                return;
            }
            ByteBuffer copy = ByteBuffer.allocateDirect(RFC_DICTIONARY_SIZE);
            byte[] buffer = new byte[4096];
            int readBytes;
            while ((readBytes = src.read(buffer)) != -1) {
                if (copy.remaining() < readBytes) {
                    // Stream holds more bytes than the dictionary size allows.
                    throw new IllegalArgumentException("invalid dictionary size");
                }
                copy.put(buffer, 0, readBytes);
            }
            if (copy.remaining() != 0) {
                // Stream ended before the full dictionary was read.
                throw new IllegalArgumentException("invalid dictionary size " + copy.remaining());
            }
            setDictionaryData(copy);
        }
    }

    /**
     * Sets data to be brotli dictionary.
     * No-op if the dictionary was already set.
     *
     * @param data {@link ByteBuffer} dictionary data; MUST be direct and exactly
     *             {@link #RFC_DICTIONARY_SIZE} bytes in capacity
     */
    public static void setDictionaryData(ByteBuffer data) {
        if (!data.isDirect()) {
            throw new IllegalArgumentException("direct byte buffer is expected");
        }
        if (data.capacity() != RFC_DICTIONARY_SIZE) {
            throw new IllegalArgumentException("invalid dictionary size");
        }
        synchronized (mutex) {
            if (isDictionaryDataSet) {
                return;
            }
            if (!CommonJNI.nativeSetDictionaryData(data)) {
                throw new RuntimeException("setting dictionary failed");
            }
            isDictionaryDataSet = true;
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/common/annotations/Local.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/common/annotations/Local.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.common.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks code that was authored in this project (Brotli4j) and therefore is
 * not kept in sync with the Google Brotli upstream repository.
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.FIELD, ElementType.TYPE, ElementType.METHOD})
public @interface Local {
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/common/annotations/Internal.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/common/annotations/Internal.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.common.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks internal-use-only API: types and methods carrying this annotation may
 * change or disappear at any time and must not be called by users directly.
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface Internal {
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/common/annotations/Upstream.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/common/annotations/Upstream.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.common.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks code taken verbatim from the Google Brotli upstream repository —
 * identical functions and parameters, no local modifications.
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface Upstream {
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/Decoders.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/Decoders.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.common.annotations.Local;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Multiple decoding methods using Netty Buffer.
 * Make sure to add it as dependency before using this class
 *
 * @see <a href="https://search.maven.org/artifact/io.netty/netty-buffer/">Netty Buffer</a>
 */
@Local
public final class Decoders {
    /**
     * Decodes the given data buffer.
     *
     * @param compressed {@link ByteBuffer} source - will be read in full (position == limit after this call).
     * @param decompressed {@link ByteBuffer} destination - decompressed data will be filled in beginning at position, up to remaining bytes; position is updated
     * @return {@link DirectDecompress} instance - upon return, only the status code is still valid
     * @throws IOException Thrown in case of error during decoding
     */
    @Local
    public static DirectDecompress decompress(ByteBuffer compressed, ByteBuffer decompressed) throws IOException {
        // Stage the NIO buffers through pooled Netty buffers and delegate.
        ByteBuf src = PooledByteBufAllocator.DEFAULT.directBuffer();
        ByteBuf dst = PooledByteBufAllocator.DEFAULT.buffer();
        try {
            src.writeBytes(compressed);
            final DirectDecompress result = decompress(src, dst);
            dst.readBytes(decompressed);
            return result;
        } finally {
            // Pooled buffers must be released to avoid leaking pool memory.
            src.release();
            dst.release();
        }
    }

    /**
     * Decodes the given data buffer.
     *
     * @param compressed {@link ByteBuf} source
     * @param decompressed {@link ByteBuf} destination
     * @return {@link DirectDecompress} instance
     * @throws IOException Thrown in case of error during decoding
     */
    @Local
    public static DirectDecompress decompress(ByteBuf compressed, ByteBuf decompressed) throws IOException {
        int compressedBytes = compressed.readableBytes();
        DecoderJNI.Wrapper decoder = new DecoderJNI.Wrapper(compressedBytes);
        try {
            decoder.getInputBuffer().put(compressed.nioBuffer());
            decoder.push(compressedBytes);
            // Pump the native decoder until it finishes or reports a non-recoverable state.
            while (decoder.getStatus() != DecoderJNI.Status.DONE) {
                switch (decoder.getStatus()) {
                    case OK:
                        decoder.push(0);
                        break;
                    case NEEDS_MORE_OUTPUT:
                        ByteBuffer buffer = decoder.pull();
                        decompressed.writeBytes(buffer);
                        break;
                    case NEEDS_MORE_INPUT:
                        // Give decoder a chance to process the remaining of the buffered byte.
                        decoder.push(0);
                        // If decoder still needs input, this means that stream is truncated.
                        if (decoder.getStatus() == DecoderJNI.Status.NEEDS_MORE_INPUT) {
                            return new DirectDecompress(decoder.getStatus(), null, null);
                        }
                        break;
                    default:
                        // Any other status indicates corrupted input; report it unchanged.
                        return new DirectDecompress(decoder.getStatus(), null, null);
                }
            }
        } finally {
            decoder.destroy();
        }
        return new DirectDecompress(decoder.getStatus(), null, decompressed);
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/Decoder.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/Decoder.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.common.annotations.Local;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
/**
 * Base class for InputStream / Channel implementations.
 * <p>
 * Wraps the native brotli decoder: compressed bytes are read from
 * {@code source} into the native input buffer, and decompressed output is
 * exposed through {@link #buffer}.
 */
@Upstream
@Local
public class Decoder {
    private static final ByteBuffer EMPTY_BUFFER = ByteBuffer.allocate(0);
    /** Channel supplying compressed input. */
    private final ReadableByteChannel source;
    /** Native decoder wrapper. */
    private final DecoderJNI.Wrapper decoder;
    /** Decompressed output pending consumption; null when drained. */
    ByteBuffer buffer;
    /** Set once {@link #close()} has run. */
    boolean closed;
    /** When true, output is pulled as soon as it is available (see {@link #enableEagerOutput()}). */
    boolean eager;

    /**
     * Creates a Decoder wrapper.
     *
     * @param source underlying source
     * @param inputBufferSize read buffer size
     * @throws IOException If any failure during initialization
     */
    public Decoder(ReadableByteChannel source, int inputBufferSize)
            throws IOException {
        if (inputBufferSize <= 0) {
            throw new IllegalArgumentException("buffer size must be positive");
        }
        if (source == null) {
            throw new NullPointerException("source can not be null");
        }
        this.source = source;
        this.decoder = new DecoderJNI.Wrapper(inputBufferSize);
    }

    /**
     * Decodes the given data buffer.
     *
     * @param data byte array of data to be decoded
     * @return {@link DirectDecompress} instance
     * @throws IOException If an error occurs during decoding
     */
    @Local
    public static DirectDecompress decompress(byte[] data) throws IOException {
        DecoderJNI.Wrapper decoder = new DecoderJNI.Wrapper(data.length);
        ArrayList<byte[]> output = new ArrayList<>();
        int totalOutputSize = 0;
        try {
            decoder.getInputBuffer().put(data);
            decoder.push(data.length);
            // Pump the native decoder until it finishes or reports a non-recoverable state.
            while (decoder.getStatus() != DecoderJNI.Status.DONE) {
                switch (decoder.getStatus()) {
                    case OK:
                        decoder.push(0);
                        break;
                    case NEEDS_MORE_OUTPUT:
                        ByteBuffer buffer = decoder.pull();
                        byte[] chunk = new byte[buffer.remaining()];
                        buffer.get(chunk);
                        output.add(chunk);
                        totalOutputSize += chunk.length;
                        break;
                    case NEEDS_MORE_INPUT:
                        // Give decoder a chance to process the remaining of the buffered byte.
                        decoder.push(0);
                        // If decoder still needs input, this means that stream is truncated.
                        if (decoder.getStatus() == DecoderJNI.Status.NEEDS_MORE_INPUT) {
                            return new DirectDecompress(decoder.getStatus(), null, null);
                        }
                        break;
                    default:
                        // Corrupted input; surface the status without data.
                        return new DirectDecompress(decoder.getStatus(), null, null);
                }
            }
        } finally {
            decoder.destroy();
        }
        if (output.size() == 1) {
            return new DirectDecompress(decoder.getStatus(), output.get(0), null);
        }
        // Concatenate all pulled chunks into one result array.
        byte[] result = new byte[totalOutputSize];
        int offset = 0;
        for (byte[] chunk : output) {
            System.arraycopy(chunk, 0, result, offset, chunk.length);
            offset += chunk.length;
        }
        return new DirectDecompress(decoder.getStatus(), result, null);
    }

    /**
     * Closes the decoder (best-effort) and fails with the given message.
     */
    @Upstream
    private void fail(String message) throws IOException {
        try {
            close();
        } catch (IOException ex) {
            /* Ignore */
        }
        throw new IOException(message);
    }

    /**
     * Attaches a custom dictionary to the native decoder.
     */
    @Upstream
    void attachDictionary(ByteBuffer dictionary) throws IOException {
        if (!decoder.attachDictionary(dictionary)) {
            fail("failed to attach dictionary");
        }
    }

    /**
     * Makes {@link #decode()} prefer pulling available output over reading
     * more input.
     */
    @Upstream
    public void enableEagerOutput() {
        this.eager = true;
    }

    /**
     * Continue decoding.
     *
     * @return -1 if stream is finished, 0 if no input is currently available,
     *         or number of bytes available in read buffer (&gt; 0)
     */
    @Upstream
    int decode() throws IOException {
        while (true) {
            if (buffer != null) {
                if (!buffer.hasRemaining()) {
                    buffer = null;
                } else {
                    return buffer.remaining();
                }
            }
            switch (decoder.getStatus()) {
                case DONE:
                    return -1;
                case OK:
                    decoder.push(0);
                    break;
                case NEEDS_MORE_INPUT:
                    // In "eager" more pulling preempts pushing.
                    if (eager && decoder.hasOutput()) {
                        buffer = decoder.pull();
                        break;
                    }
                    ByteBuffer inputBuffer = decoder.getInputBuffer();
                    ((Buffer) inputBuffer).clear();
                    int bytesRead = source.read(inputBuffer);
                    if (bytesRead == -1) {
                        fail("unexpected end of input");
                    }
                    if (bytesRead == 0) {
                        // No input data is currently available.
                        buffer = EMPTY_BUFFER;
                        return 0;
                    }
                    decoder.push(bytesRead);
                    break;
                case NEEDS_MORE_OUTPUT:
                    buffer = decoder.pull();
                    break;
                default:
                    fail("corrupted input");
            }
        }
    }

    /**
     * Marks {@code length} bytes of the pending output buffer as consumed.
     */
    @Upstream
    void discard(int length) {
        ((Buffer) buffer).position(buffer.position() + length);
        if (!buffer.hasRemaining()) {
            buffer = null;
        }
    }

    /**
     * Copies as much pending output as fits into {@code dst}.
     *
     * @return number of bytes transferred
     */
    @Upstream
    int consume(ByteBuffer dst) {
        ByteBuffer slice = buffer.slice();
        int limit = Math.min(slice.remaining(), dst.remaining());
        ((Buffer) slice).limit(limit);
        dst.put(slice);
        discard(limit);
        return limit;
    }

    /**
     * Destroys the native decoder and closes the source. Idempotent.
     */
    @Upstream
    void close() throws IOException {
        if (closed) {
            return;
        }
        closed = true;
        decoder.destroy();
        source.close();
    }

    /**
     * Decodes {@code length} bytes of the given data buffer starting at {@code offset}.
     *
     * @param data byte array holding compressed data
     * @param offset index of the first byte to decode
     * @param length number of bytes to decode
     * @return decompressed byte array
     * @throws IOException if the input is truncated or corrupted
     */
    @Upstream
    public static byte[] decompress(byte[] data, int offset, int length) throws IOException {
        DecoderJNI.Wrapper decoder = new DecoderJNI.Wrapper(length);
        ArrayList<byte[]> output = new ArrayList<>();
        int totalOutputSize = 0;
        try {
            decoder.getInputBuffer().put(data, offset, length);
            decoder.push(length);
            while (decoder.getStatus() != DecoderJNI.Status.DONE) {
                switch (decoder.getStatus()) {
                    case OK:
                        decoder.push(0);
                        break;
                    case NEEDS_MORE_OUTPUT:
                        ByteBuffer buffer = decoder.pull();
                        byte[] chunk = new byte[buffer.remaining()];
                        buffer.get(chunk);
                        output.add(chunk);
                        totalOutputSize += chunk.length;
                        break;
                    case NEEDS_MORE_INPUT:
                        // Give decoder a chance to process the remaining of the buffered byte.
                        decoder.push(0);
                        // If decoder still needs input, this means that stream is truncated.
                        if (decoder.getStatus() == DecoderJNI.Status.NEEDS_MORE_INPUT) {
                            throw new IOException("corrupted input");
                        }
                        break;
                    default:
                        throw new IOException("corrupted input");
                }
            }
        } finally {
            decoder.destroy();
        }
        if (output.size() == 1) {
            return output.get(0);
        }
        // Concatenate all pulled chunks into one result array.
        byte[] result = new byte[totalOutputSize];
        int resultOffset = 0;
        for (byte[] chunk : output) {
            System.arraycopy(chunk, 0, result, resultOffset, chunk.length);
            resultOffset += chunk.length;
        }
        return result;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/BrotliDecoderChannel.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/BrotliDecoderChannel.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ReadableByteChannel;
/**
 * ReadableByteChannel that wraps native brotli decoder.
 */
@Upstream
public class BrotliDecoderChannel extends Decoder implements ReadableByteChannel {

    /**
     * The default internal buffer size used by the decoder.
     */
    private static final int DEFAULT_BUFFER_SIZE = 16384;

    /** Serializes all channel operations so the channel can be shared across threads. */
    private final Object mutex = new Object();

    /**
     * Creates a BrotliDecoderChannel with the default intermediate buffer size.
     *
     * @param source underlying source
     * @throws IOException If any failure during initialization
     */
    public BrotliDecoderChannel(ReadableByteChannel source) throws IOException {
        this(source, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Creates a BrotliDecoderChannel.
     *
     * @param source underlying source
     * @param bufferSize intermediate buffer size
     * @throws IOException If any failure during initialization
     */
    public BrotliDecoderChannel(ReadableByteChannel source, int bufferSize) throws IOException {
        super(source, bufferSize);
    }

    @Override
    public void attachDictionary(ByteBuffer dictionary) throws IOException {
        super.attachDictionary(dictionary);
    }

    @Override
    public boolean isOpen() {
        synchronized (mutex) {
            return !closed;
        }
    }

    @Override
    public void close() throws IOException {
        synchronized (mutex) {
            super.close();
        }
    }

    @Override
    public int read(ByteBuffer dst) throws IOException {
        synchronized (mutex) {
            if (closed) {
                throw new ClosedChannelException();
            }
            int total = 0;
            while (dst.hasRemaining()) {
                int available = decode();
                if (available <= 0) {
                    // Nothing transferred yet: propagate EOF (-1) / "no data" (0) directly.
                    return (total == 0) ? available : total;
                }
                total += consume(dst);
            }
            return total;
        }
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/DirectDecompress.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/DirectDecompress.java | /*
* Copyright (c) 2020-2025, Aayush Atharva
*
* Brotli4j licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.common.annotations.Local;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import java.io.IOException;
/**
 * Directly decompresses data using {@link Decoder#decompress(byte[])}
 */
@Local
public final class DirectDecompress {

    /** Final status reported by the native decoder. */
    private final DecoderJNI.Status resultStatus;

    // Two lazily-materialized views of the same payload; whichever is first
    // requested is derived from the other on demand and then cached.
    private byte[] decompressedData;
    private ByteBuf byteBuf;

    DirectDecompress(DecoderJNI.Status resultStatus, byte[] decompressedData, ByteBuf byteBuf) {
        this.resultStatus = resultStatus;
        this.decompressedData = decompressedData;
        this.byteBuf = byteBuf;
    }

    /**
     * Initiate direct decompression of data
     *
     * @param compressedData Compressed data as Byte Array
     * @return {@link DirectDecompress} Instance
     * @throws IOException In case of some error during decompression
     */
    public static DirectDecompress decompress(byte[] compressedData) throws IOException {
        return Decoder.decompress(compressedData);
    }

    /**
     * Get the result of decompression.
     *
     * @return {@link DecoderJNI.Status}
     */
    public DecoderJNI.Status getResultStatus() {
        return resultStatus;
    }

    /**
     * Get decompressed data.
     *
     * @return {@code byte} array if decompression was successful else {@code null}
     */
    public byte[] getDecompressedData() {
        byte[] bytes = decompressedData;
        if (bytes == null && byteBuf != null) {
            // Materialize the array view from the ByteBuf on first access.
            bytes = ByteBufUtil.getBytes(byteBuf);
            decompressedData = bytes;
        }
        return bytes;
    }

    /**
     * Get decompressed data.
     *
     * @return {@link ByteBuf} if decompression was successful else {@code null}
     */
    public ByteBuf getDecompressedDataByteBuf() {
        ByteBuf buf = byteBuf;
        if (buf == null && decompressedData != null) {
            // Materialize a ByteBuf view over the existing array on first access.
            buf = Unpooled.wrappedBuffer(decompressedData);
            byteBuf = buf;
        }
        return buf;
    }
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/BrotliInputStream.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/BrotliInputStream.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
/**
 * InputStream that wraps native brotli decoder.
 */
@Upstream
public class BrotliInputStream extends InputStream {
/**
 * The default internal buffer size used by the decoder.
 */
private static final int DEFAULT_BUFFER_SIZE = 16384;
private final Decoder decoder;
/**
 * Creates a BrotliInputStream
 *
 * @param source underlying source
 * @param bufferSize intermediate buffer size
 * @throws IOException If any failure during initialization
 */
public BrotliInputStream(InputStream source, int bufferSize)
throws IOException {
this.decoder = new Decoder(Channels.newChannel(source), bufferSize);
}
/**
 * Creates a BrotliInputStream
 *
 * @param source underlying source
 * @throws IOException If any failure during initialization
 */
public BrotliInputStream(InputStream source) throws IOException {
this(source, DEFAULT_BUFFER_SIZE);
}
/**
 * Attaches a prepared dictionary to the underlying decoder.
 *
 * @param dictionary {@link ByteBuffer} containing dictionary
 * @throws IOException If the dictionary cannot be attached
 */
public void attachDictionary(ByteBuffer dictionary) throws IOException {
decoder.attachDictionary(dictionary);
}
// Makes decoded bytes visible as soon as the native decoder produces them.
public void enableEagerOutput() {
decoder.enableEagerOutput();
}
@Override
public void close() throws IOException {
decoder.close();
}
// Reports only bytes that have already been decoded; never triggers more decoding.
@Override
public int available() {
return (decoder.buffer != null) ? decoder.buffer.remaining() : 0;
}
@Override
public int read() throws IOException {
if (decoder.closed) {
throw new IOException("read after close");
}
int decoded;
// Iterate until at least one byte is decoded, or EOF reached.
// decode() == 0 means the (non-blocking) source had no data yet, so spin politely.
while (true) {
decoded = decoder.decode();
if (decoded != 0) {
break;
}
Thread.yield();
}
if (decoded == -1) {
return -1;
}
return decoder.buffer.get() & 0xFF;
}
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (decoder.closed) {
throw new IOException("read after close");
}
if (decoder.decode() == -1) {
return -1;
}
int result = 0;
while (len > 0) {
int limit;
// Busy-wait until the decoder's buffer has at least one byte for us.
while ((limit = Math.min(len, decoder.buffer.remaining())) == 0) {
Thread.yield();
}
decoder.buffer.get(b, off, limit);
off += limit;
len -= limit;
result += limit;
if (decoder.decode() == -1) {
break;
}
}
return result;
}
@Override
public long skip(long n) throws IOException {
if (decoder.closed) {
throw new IOException("read after close");
}
long result = 0;
// Decode and discard until n bytes were skipped or the stream ends.
while (n > 0) {
if (decoder.decode() == -1) {
break;
}
int limit = (int) Math.min(n, decoder.buffer.remaining());
decoder.discard(limit);
result += limit;
n -= limit;
}
return result;
}
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
hyperxpro/Brotli4j | https://github.com/hyperxpro/Brotli4j/blob/d65b3d2529c533b16cea08dd7c367cffa1594767/brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/DecoderJNI.java | brotli4j/src/main/java/com/aayushatharva/brotli4j/decoder/DecoderJNI.java | /* Copyright 2017 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package com.aayushatharva.brotli4j.decoder;
import com.aayushatharva.brotli4j.common.annotations.Upstream;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * JNI wrapper for brotli decoder.
 */
@Upstream
public class DecoderJNI {
private static native ByteBuffer nativeCreate(long[] context);
private static native void nativePush(long[] context, int length);
private static native ByteBuffer nativePull(long[] context);
private static native void nativeDestroy(long[] context);
private static native boolean nativeAttachDictionary(long[] context, ByteBuffer dictionary);
public enum Status {
ERROR,
DONE,
NEEDS_MORE_INPUT,
NEEDS_MORE_OUTPUT,
OK
}
public static class Wrapper {
// Shared state with the native side:
//   context[0] — native decoder handle; 0 once destroyed (used as the liveness check below)
//   context[1] — carries the input buffer size into nativeCreate, and the status code back
//                after each native call (see parseStatus)
//   context[2] — non-zero when the native decoder has pending output (see hasOutput)
private final long[] context = new long[3];
private final ByteBuffer inputBuffer;
private Status lastStatus = Status.NEEDS_MORE_INPUT;
// True until the first push/pull; dictionaries may only be attached while fresh.
private boolean fresh = true;
public Wrapper(int inputBufferSize) throws IOException {
this.context[1] = inputBufferSize;
this.inputBuffer = nativeCreate(this.context);
if (this.context[0] == 0) {
throw new IOException("failed to initialize native brotli decoder");
}
}
// Attaches a dictionary; only legal on a live, untouched decoder with a direct buffer.
public boolean attachDictionary(ByteBuffer dictionary) {
if (!dictionary.isDirect()) {
throw new IllegalArgumentException("only direct buffers allowed");
}
if (context[0] == 0) {
throw new IllegalStateException("brotli decoder is already destroyed");
}
if (!fresh) {
throw new IllegalStateException("decoding is already started");
}
return nativeAttachDictionary(context, dictionary);
}
// Feeds `length` bytes of the input buffer to the native decoder (length 0 = "continue").
public void push(int length) {
if (length < 0) {
throw new IllegalArgumentException("negative block length");
}
if (context[0] == 0) {
throw new IllegalStateException("brotli decoder is already destroyed");
}
if (lastStatus != Status.NEEDS_MORE_INPUT && lastStatus != Status.OK) {
throw new IllegalStateException("pushing input to decoder in " + lastStatus + " state");
}
if (lastStatus == Status.OK && length != 0) {
throw new IllegalStateException("pushing input to decoder in OK state");
}
fresh = false;
nativePush(context, length);
parseStatus();
}
// Translates the native status code left in context[1] into the Status enum.
private void parseStatus() {
long status = context[1];
if (status == 1) {
lastStatus = Status.DONE;
} else if (status == 2) {
lastStatus = Status.NEEDS_MORE_INPUT;
} else if (status == 3) {
lastStatus = Status.NEEDS_MORE_OUTPUT;
} else if (status == 4) {
lastStatus = Status.OK;
} else {
lastStatus = Status.ERROR;
}
}
public Status getStatus() {
return lastStatus;
}
public ByteBuffer getInputBuffer() {
return inputBuffer;
}
public boolean hasOutput() {
return context[2] != 0;
}
// Pulls one chunk of decoded output; only legal when output is pending.
public ByteBuffer pull() {
if (context[0] == 0) {
throw new IllegalStateException("brotli decoder is already destroyed");
}
if (lastStatus != Status.NEEDS_MORE_OUTPUT && !hasOutput()) {
throw new IllegalStateException("pulling output from decoder in " + lastStatus + " state");
}
fresh = false;
ByteBuffer result = nativePull(context);
parseStatus();
return result;
}
/**
 * Releases native resources.
 */
public void destroy() {
if (context[0] == 0) {
throw new IllegalStateException("brotli decoder is already destroyed");
}
nativeDestroy(context);
context[0] = 0;
}
// Safety net for leaked wrappers; normal callers must invoke destroy() themselves.
@Override
protected void finalize() throws Throwable {
if (context[0] != 0) {
/* TODO: log resource leak? */
destroy();
}
super.finalize();
}
}
}
| java | Apache-2.0 | d65b3d2529c533b16cea08dd7c367cffa1594767 | 2026-01-05T02:41:17.504612Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/test/java/org/moditect/jfranalytics/JfrSchemaFactoryTest.java | src/test/java/org/moditect/jfranalytics/JfrSchemaFactoryTest.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.ZonedDateTime;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests for {@code JfrSchemaFactory}: each test opens a recorded JFR file through
 * Calcite's JDBC driver and queries the exposed event tables.
 *
 * <p>Fix: AssertJ descriptions must be set <em>before</em> the assertion method; the previous
 * {@code isEqualTo(...).describedAs(...)} order silently discarded every description.
 */
public class JfrSchemaFactoryTest {
    @Test
    public void canRetrieveTables() throws Exception {
        try (Connection connection = getConnection("basic.jfr")) {
            DatabaseMetaData md = connection.getMetaData();
            try (ResultSet rs = md.getTables(null, "%", "%", null)) {
                Set<String> tableNames = new HashSet<>();
                while (rs.next()) {
                    tableNames.add(rs.getString(3));
                }
                assertThat(tableNames).contains("jdk.GarbageCollection", "jdk.ThreadSleep", "jfrunit.Sync");
            }
            try (ResultSet rs = md.getColumns(null, "JFR", "jdk.ThreadSleep", null)) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("startTime");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("TIMESTAMP(0)");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("duration");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("BIGINT");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("eventThread");
                assertThat(rs.getString(6)).describedAs("type name").startsWith("RecordType");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("stackTrace");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("OTHER");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("time");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("BIGINT");
                assertThat(rs.next()).isFalse();
            }
        }
        try (Connection connection = getConnection("data-types.jfr")) {
            DatabaseMetaData md = connection.getMetaData();
            try (ResultSet rs = md.getTables(null, "%", "%", null)) {
                Set<String> tableNames = new HashSet<>();
                while (rs.next()) {
                    tableNames.add(rs.getString(3));
                }
                assertThat(tableNames).contains("test.DataTypes");
            }
            try (ResultSet rs = md.getColumns(null, "JFR", "test.DataTypes", null)) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("startTime");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("TIMESTAMP(0)");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("duration");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("BIGINT");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("eventThread");
                assertThat(rs.getString(6)).describedAs("type name").startsWith("RecordType");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("stackTrace");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("OTHER");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someBoolean");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("BOOLEAN");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someChar");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("CHAR(1)");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someByte");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("TINYINT");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someShort");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("SMALLINT");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someInt");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("INTEGER");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someLong");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("BIGINT");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someFloat");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("REAL");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someDouble");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("DOUBLE");
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(4)).describedAs("column name").isEqualTo("someString");
                assertThat(rs.getString(6)).describedAs("type name").isEqualTo("VARCHAR");
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canSelectDifferentDataTypes() throws Exception {
        try (Connection connection = getConnection("data-types.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT * FROM jfr."test.DataTypes"
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getTimestamp(1)).isEqualTo(Timestamp.from(ZonedDateTime.parse("2021-12-28T17:10:09.724000000+01:00").toInstant()));
                assertThat(rs.getBoolean(5)).isTrue();
                assertThat(rs.getString(6)).isEqualTo("X");
                assertThat(rs.getByte(7)).isEqualTo(Byte.MAX_VALUE);
                assertThat(rs.getShort(8)).isEqualTo(Short.MAX_VALUE);
                assertThat(rs.getInt(9)).isEqualTo(Integer.MAX_VALUE);
                assertThat(rs.getLong(10)).isEqualTo(Long.MAX_VALUE);
                assertThat(rs.getFloat(11)).isEqualTo(Float.MAX_VALUE);
                assertThat(rs.getDouble(12)).isEqualTo(Double.MAX_VALUE);
                assertThat(rs.getString(13)).isEqualTo("SQL rockz");
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canRunSimpleSelectFromThreadSleep() throws Exception {
        try (Connection connection = getConnection("basic.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT "startTime", "time", ("eventThread")."javaName", TRUNCATE_STACKTRACE("stackTrace", 7)
                FROM jfr."jdk.ThreadSleep"
                WHERE "time" = 1000000000
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getTimestamp(1)).isEqualTo(Timestamp.from(ZonedDateTime.parse("2021-12-23T13:40:50.402000000Z").toInstant()));
                assertThat(rs.getLong(2)).isEqualTo(1_000_000_000L);
                assertThat(rs.getString(3)).isEqualTo("main");
                assertThat(rs.getString(4)).isEqualTo("""
                    java.lang.Thread.sleep(long)
                    org.moditect.jfrunit.demos.todo.HelloJfrUnitTest.basicTest():24
                    jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Method, Object, Object[])
                    jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Object, Object[]):77
                    jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Object, Object[]):43
                    java.lang.reflect.Method.invoke(Object, Object[]):568
                    org.junit.platform.commons.util.ReflectionUtils.invokeMethod(Method, Object, Object[]):688
                    """);
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canRunSimpleSelectFromGarbageCollection() throws Exception {
        try (Connection connection = getConnection("basic.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT "startTime", "duration", "gcId", "name", "cause", "sumOfPauses", "longestPause"
                FROM jfr."jdk.GarbageCollection"
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getTimestamp(1)).isEqualTo(Timestamp.from(ZonedDateTime.parse("2021-12-23T13:40:50.384000000Z").toInstant()));
                assertThat(rs.getLong(2)).isEqualTo(17717731L);
                assertThat(rs.getInt(3)).isEqualTo(2);
                assertThat(rs.getString(4)).isEqualTo("G1Full");
                assertThat(rs.getString(5)).isEqualTo("System.gc()");
                assertThat(rs.getLong(6)).isEqualTo(17717730L);
                assertThat(rs.getLong(7)).isEqualTo(17717730L);
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canRunSimpleSelectFromClassLoad() throws Exception {
        try (Connection connection = getConnection("class-loading.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT "startTime", "loadedClass", "initiatingClassLoader", "definingClassLoader"
                FROM jfr."jdk.ClassLoad"
                ORDER by "startTime"
                LIMIT 1
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getTimestamp(1)).isEqualTo(Timestamp.from(ZonedDateTime.parse("2021-12-26T17:32:45.428000000+01:00").toInstant()));
                assertThat(rs.getString(2)).isEqualTo("""
                    {
                    classLoader = null
                    name = "java/lang/Throwable"
                    package = {
                    name = "java/lang"
                    module = {
                    name = "java.base"
                    version = "17"
                    location = "jrt:/java.base"
                    classLoader = null
                    }
                    exported = true
                    }
                    modifiers = 33
                    hidden = false
                    }
                    """);
                assertThat(rs.getString(3)).isEqualTo("platform");
                assertThat(rs.getString(4)).isEqualTo("bootstrap");
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canRunSimpleSelectFromGcConfiguration() throws Exception {
        try (Connection connection = getConnection("gc-configuration.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT *
                FROM jfr."jdk.GCConfiguration"
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getTimestamp(1)).isEqualTo(Timestamp.from(ZonedDateTime.parse("2021-12-28T16:13:32.114000000+01:00").toInstant()));
                assertThat(rs.getString(2)).isEqualTo("G1New");
                assertThat(rs.getString(3)).isEqualTo("G1Old");
                assertThat(rs.getInt(4)).isEqualTo(10);
                assertThat(rs.getInt(5)).isEqualTo(3);
                assertThat(rs.getBoolean(6)).isTrue();
                assertThat(rs.getBoolean(7)).isFalse();
                assertThat(rs.getBoolean(8)).isFalse();
                assertThat(rs.getLong(9)).isEqualTo(Long.MIN_VALUE);
                assertThat(rs.getInt(10)).isEqualTo(12);
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canUseGetClassNameFunction() throws Exception {
        try (Connection connection = getConnection("class-loading.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT CLASS_NAME("loadedClass") as className
                FROM jfr."jdk.ClassLoad"
                ORDER by "startTime"
                LIMIT 1
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("java.lang.Throwable");
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canRunAggregations() throws Exception {
        try (Connection connection = getConnection("basic.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT count(*), sum("time")
                FROM jfr."jdk.ThreadSleep"
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getLong(1)).isEqualTo(51);
                assertThat(rs.getLong(2)).isEqualTo(5_850_000_000L);
                assertThat(rs.next()).isFalse();
            }
        }
        try (Connection connection = getConnection("class-loading.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT "definingClassLoader", count(*) as loadedClasses
                FROM jfr."jdk.ClassLoad"
                GROUP BY "definingClassLoader"
                ORDER BY loadedClasses DESC
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("io.quarkus.bootstrap.classloading.QuarkusClassLoader");
                assertThat(rs.getLong(2)).isEqualTo(728);
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("bootstrap");
                assertThat(rs.getLong(2)).isEqualTo(625);
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("platform");
                assertThat(rs.getLong(2)).isEqualTo(388);
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isNull();
                assertThat(rs.getLong(2)).isEqualTo(41);
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("app");
                assertThat(rs.getLong(2)).isEqualTo(1);
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canRunSimpleSelectFromObjectAllocation() throws Exception {
        try (Connection connection = getConnection("object-allocations.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT TRUNCATE_STACKTRACE("stackTrace", 40), SUM("weight")
                FROM jfr."jdk.ObjectAllocationSample"
                WHERE "startTime" > (SELECT "startTime" FROM jfr."jfrunit.Reset")
                GROUP BY TRUNCATE_STACKTRACE("stackTrace", 40)
                ORDER BY SUM("weight") DESC
                LIMIT 10
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).startsWith("java.io.BufferedReader.<init>(Reader, int):106");
                assertThat(rs.getLong(2)).isEqualTo(311214384);
            }
        }
    }
    @Test
    public void canUseHasMatchingFrameFunction() throws Exception {
        try (Connection connection = getConnection("object-allocations.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT TRUNCATE_STACKTRACE("stackTrace", 10)
                FROM jfr."jdk.ObjectAllocationSample"
                WHERE "startTime" > (SELECT "startTime" FROM jfr."jfrunit.Reset")
                AND HAS_MATCHING_FRAME("stackTrace", '.*java\\.util\\.ArrayList\\.addAll.*')
                """);
            try (ResultSet rs = statement.executeQuery()) {
                int size = 0;
                while (rs.next()) {
                    size++;
                }
                assertThat(size).isEqualTo(73);
            }
        }
    }
    @Test
    public void canJoinThreadStartAndStop() throws Exception {
        try (Connection connection = getConnection("thread-start-stop.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT ts."parentThread"."javaName", ts."thread"."javaName", ts."thread"."javaThreadId", te."thread"."javaName", te."thread"."javaThreadId"
                FROM jfr."jdk.ThreadStart" ts
                LEFT JOIN jfr."jdk.ThreadEnd" te ON ts."thread"."javaThreadId" = te."thread"."javaThreadId"
                ORDER BY ts."thread"."javaThreadId"
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("main");
                assertThat(rs.getString(2)).isEqualTo("pool-1-thread-1");
                assertThat(rs.getLong(3)).isEqualTo(21L);
                assertThat(rs.getString(4)).isEqualTo("pool-1-thread-1");
                assertThat(rs.getLong(5)).isEqualTo(21L);
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("main");
                assertThat(rs.getString(2)).isEqualTo("pool-1-thread-2");
                assertThat(rs.getLong(3)).isEqualTo(22L);
                assertThat(rs.getString(4)).isEqualTo("pool-1-thread-2");
                assertThat(rs.getLong(5)).isEqualTo(22L);
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("Signal Dispatcher");
                assertThat(rs.getString(2)).isEqualTo("Attach Listener");
                assertThat(rs.getLong(3)).isEqualTo(23L);
                assertThat(rs.getString(4)).isNull();
                assertThat(rs.getObject(5)).isNull();
                assertThat(rs.next()).isTrue();
                assertThat(rs.getString(1)).isEqualTo("Attach Listener");
                assertThat(rs.getString(2)).isEqualTo("RMI TCP Accept-0");
                assertThat(rs.getLong(3)).isEqualTo(24L);
                assertThat(rs.getString(4)).isNull();
                assertThat(rs.getObject(5)).isNull();
                assertThat(rs.next()).isFalse();
            }
        }
    }
    @Test
    public void canReadAsyncProfilerWallProfile() throws Exception {
        try (Connection connection = getConnection("async-profiler-wall.jfr")) {
            PreparedStatement statement = connection.prepareStatement("""
                SELECT COUNT(*)
                FROM jfr."jdk.ExecutionSample"
                """);
            try (ResultSet rs = statement.executeQuery()) {
                assertThat(rs.next()).isTrue();
                assertThat(rs.getInt(1)).isEqualTo(428);
            }
        }
    }
    /** Opens a Calcite JDBC connection whose inline model points at the given JFR test resource. */
    private Connection getConnection(String jfrFileName) throws SQLException {
        Path jfrFile = getTestResource(jfrFileName);
        Properties properties = new Properties();
        properties.put("model", JfrSchemaFactory.getInlineModel(jfrFile));
        return DriverManager.getConnection("jdbc:calcite:", properties);
    }
    /** Resolves a classpath test resource to a Path, failing fast when it is missing. */
    private Path getTestResource(String resource) {
        try {
            Path path = Path.of(JfrSchemaFactoryTest.class.getResource("/" + resource).toURI());
            if (!Files.exists(path)) {
                throw new IllegalArgumentException("Couldn't find resource: " + path);
            }
            return path;
        }
        catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/HasMatchingFrameFunction.java | src/main/java/org/moditect/jfranalytics/HasMatchingFrameFunction.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.calcite.schema.ScalarFunction;
import org.apache.calcite.schema.impl.ScalarFunctionImpl;
import jdk.jfr.consumer.RecordedFrame;
import jdk.jfr.consumer.RecordedStackTrace;
/**
 * Scalar SQL function that reports whether a {@link RecordedStackTrace} contains at least one
 * frame whose textual representation matches the given regular expression.
 * (Previous javadoc was copy-pasted from the truncate function and described the wrong behavior.)
 */
public class HasMatchingFrameFunction {

    public static final ScalarFunction INSTANCE = ScalarFunctionImpl.create(HasMatchingFrameFunction.class, "eval");

    /**
     * Evaluates the function against a recorded stack trace.
     *
     * @param recordedStackTrace the {@link RecordedStackTrace} to inspect; a {@code null} trace
     *     matches by definition (rows without a stack trace are not filtered out)
     * @param pattern regular expression applied to each frame's textual form; must not be
     *     {@code null}
     * @return {@code true} if any frame matches the pattern
     */
    public boolean eval(Object recordedStackTrace, String pattern) {
        if (recordedStackTrace == null) {
            return true;
        }
        if (!(recordedStackTrace instanceof RecordedStackTrace)) {
            throw new IllegalArgumentException("Unexpected value type: " + recordedStackTrace);
        }
        if (pattern == null) {
            throw new IllegalArgumentException("A pattern must be given");
        }
        // Compile the regex once per invocation: String.matches() would recompile it
        // for every frame on the stack.
        Pattern compiled = Pattern.compile(pattern);
        List<RecordedFrame> frames = ((RecordedStackTrace) recordedStackTrace).getFrames();
        for (RecordedFrame recordedFrame : frames) {
            String frameAsText = FrameHelper.asText(recordedFrame);
            if (frameAsText != null && compiled.matcher(frameAsText).matches()) {
                return true;
            }
        }
        return false;
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/FrameHelper.java | src/main/java/org/moditect/jfranalytics/FrameHelper.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import jdk.jfr.consumer.RecordedFrame;
import jdk.jfr.consumer.RecordedMethod;
/**
 * Renders a {@link RecordedFrame} as text of the form
 * {@code package.Type.method(paramType, ...):line}, with parameter types taken
 * from the JVM method descriptor.
 */
public class FrameHelper {

    /**
     * Returns the textual representation of the given frame, or {@code null}
     * for frames that are not Java frames or whose method is hidden.
     */
    public static String asText(RecordedFrame frame) {
        if (!frame.isJavaFrame() || frame.getMethod().isHidden()) {
            return null;
        }

        StringBuilder builder = new StringBuilder();
        RecordedMethod method = frame.getMethod();
        builder.append(method.getType().getName());
        builder.append('.');
        builder.append(method.getName());
        builder.append('(');
        appendParameters(method.getDescriptor(), builder);
        builder.append(')');

        // getLineNumber() may be negative when no line information is available
        int line = frame.getLineNumber();
        if (line >= 0) {
            builder.append(':').append(line);
        }

        return builder.toString();
    }

    /**
     * Appends the parameter types to the given builder.
     * <p>
     * Parses the parameter portion of a JVM method descriptor, i.e. the
     * characters between '(' and ')', translating primitive type codes to
     * their Java names and object types ({@code Lpkg/Name;}) to their
     * unqualified class names.
     *
     * @see https://docs.oracle.com/javase/specs/jvms/se17/html/jvms-4.html#jvms-4.3.3
     */
    private static void appendParameters(String methodDescriptor, StringBuilder builder) {
        boolean beforeFirstParameter = true;

        // index 0 is the opening '('; stop at the closing ')'.
        // NOTE: i is also advanced inside the loop body while consuming
        // multi-character type tokens ('[' prefixes and 'L...;' names).
        for (int i = 1; i < methodDescriptor.lastIndexOf(')'); i++) {
            if (beforeFirstParameter) {
                beforeFirstParameter = false;
            }
            else {
                builder.append(", ");
            }

            // put array brackets after the type name
            int arrayDimension = 0;
            while ((methodDescriptor.charAt(i)) == '[') {
                arrayDimension++;
                i++;
            }

            char nextChar = methodDescriptor.charAt(i);

            switch (nextChar) {
                case 'B':
                    builder.append("byte");
                    break;
                case 'C':
                    builder.append("char");
                    break;
                case 'D':
                    builder.append("double");
                    break;
                case 'F':
                    builder.append("float");
                    break;
                case 'I':
                    builder.append("int");
                    break;
                case 'J':
                    builder.append("long");
                    break;
                case 'L':
                    // object type: Lcom/example/Foo; -> Foo
                    int typeNameStartIndex = builder.length();
                    int lastDotIndex = -1;
                    i++;

                    // consume type name
                    while ((nextChar = methodDescriptor.charAt(i)) != ';') {
                        if (nextChar == '/') {
                            builder.append('.');
                            lastDotIndex = builder.length();
                        }
                        else {
                            builder.append(nextChar);
                        }
                        i++;
                    }

                    // only keep unqualified name
                    if (lastDotIndex > 0) {
                        builder.delete(typeNameStartIndex, lastDotIndex);
                    }
                    break;
                case 'S':
                    builder.append("short");
                    break;
                case 'Z':
                    builder.append("boolean");
                    break;
            }

            for (int y = 0; y < arrayDimension; y++) {
                builder.append("[]");
            }
        }
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/JfrScannableTable.java | src/main/java/org/moditect/jfranalytics/JfrScannableTable.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.nio.file.Path;
import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.ScannableTable;
import org.apache.calcite.schema.impl.AbstractTable;
import org.checkerframework.checker.nullness.qual.Nullable;
import jdk.jfr.EventType;
/**
 * A Calcite {@link ScannableTable} exposing the events of one JFR event type
 * from a recording file as rows.
 */
public class JfrScannableTable extends AbstractTable implements ScannableTable {

    private final Path jfrFile;
    private final EventType eventType;
    private final RelDataType rowType;
    private final AttributeValueConverter[] converters;

    /**
     * @param jfrFile    the recording file to read events from
     * @param eventType  the JFR event type backing this table
     * @param rowType    the relational row type derived from the event type's fields
     * @param converters one converter per column, extracting the column value from an event
     */
    public JfrScannableTable(Path jfrFile, EventType eventType, RelDataType rowType, AttributeValueConverter[] converters) {
        this.jfrFile = jfrFile;
        this.eventType = eventType;
        this.rowType = rowType;
        // defensive copy, so later mutation of the caller's array can't change this table
        this.converters = converters.clone();
    }

    @Override
    public RelDataType getRowType(RelDataTypeFactory typeFactory) {
        return rowType;
    }

    @Override
    public Enumerable<@Nullable Object[]> scan(DataContext root) {
        return new JfrEnumerable(jfrFile, eventType, converters);
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/AttributeValueConverter.java | src/main/java/org/moditect/jfranalytics/AttributeValueConverter.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import jdk.jfr.consumer.RecordedEvent;
/**
 * Extracts a single column value from a {@link RecordedEvent}.
 * <p>
 * Implementations are typically supplied as lambda expressions, one per table column.
 */
@FunctionalInterface
public interface AttributeValueConverter {

    /**
     * @param event the recorded event to read from
     * @return the converted attribute value; may be {@code null}
     */
    Object getValue(RecordedEvent event);
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/TruncateStackTraceFunction.java | src/main/java/org/moditect/jfranalytics/TruncateStackTraceFunction.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.util.List;
import org.apache.calcite.schema.ScalarFunction;
import org.apache.calcite.schema.impl.ScalarFunctionImpl;
import jdk.jfr.consumer.RecordedFrame;
import jdk.jfr.consumer.RecordedStackTrace;
/**
 * Truncates a {@link RecordedStackTrace} to the given maximum depth.
 */
public class TruncateStackTraceFunction {

    public static final ScalarFunction INSTANCE = ScalarFunctionImpl.create(TruncateStackTraceFunction.class, "eval");

    /**
     * @param recordedStackTrace the {@link RecordedStackTrace} to truncate; may be {@code null}
     * @param depth              maximum number of frames to retain; must be at least 1
     * @return the textual form of the first {@code depth} frames, one per line,
     *         or {@code null} if no stack trace was given
     */
    public String eval(Object recordedStackTrace, int depth) {
        if (recordedStackTrace == null) {
            return null;
        }
        if (!(recordedStackTrace instanceof RecordedStackTrace)) {
            throw new IllegalArgumentException("Unexpected value type: " + recordedStackTrace);
        }
        if (depth < 1) {
            throw new IllegalArgumentException("At least one frame must be retained");
        }

        List<RecordedFrame> frames = ((RecordedStackTrace) recordedStackTrace).getFrames();
        int retained = Math.min(depth, frames.size());

        StringBuilder truncated = new StringBuilder();
        for (int i = 0; i < retained; i++) {
            truncated.append(FrameHelper.asText(frames.get(i)))
                    .append(System.lineSeparator());
        }

        return truncated.toString();
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/GetClassNameFunction.java | src/main/java/org/moditect/jfranalytics/GetClassNameFunction.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import org.apache.calcite.schema.ScalarFunction;
import org.apache.calcite.schema.impl.ScalarFunctionImpl;
import jdk.jfr.consumer.RecordedClass;
/**
 * Returns the fully-qualified name of a {@link RecordedClass}.
 */
public class GetClassNameFunction {

    public static final ScalarFunction INSTANCE = ScalarFunctionImpl.create(GetClassNameFunction.class, "eval");

    /**
     * @param recordedClass the {@link RecordedClass} whose name to return; may be {@code null}
     * @return the class name, or {@code null} if no class was given
     * @throws IllegalArgumentException if the value is not a {@link RecordedClass}
     */
    public String eval(Object recordedClass) {
        // Propagate SQL NULL instead of failing, consistent with the other
        // functions of this schema (TRUNCATE_STACKTRACE, HAS_MATCHING_FRAME)
        if (recordedClass == null) {
            return null;
        }
        if (!(recordedClass instanceof RecordedClass)) {
            throw new IllegalArgumentException("Unexpected value type: " + recordedClass);
        }
        return ((RecordedClass) recordedClass).getName();
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/JfrEnumerable.java | src/main/java/org/moditect/jfranalytics/JfrEnumerable.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.linq4j.Linq4j;
import jdk.jfr.EventType;
import jdk.jfr.consumer.EventStream;
/**
 * An enumerable that materializes all events of one JFR event type from a
 * recording file into rows, one column value per {@link AttributeValueConverter}.
 */
public class JfrEnumerable extends AbstractEnumerable<Object[]> {

    private final Path jfrFile;
    private final EventType eventType;
    private final AttributeValueConverter[] converters;

    public JfrEnumerable(Path jfrFile, EventType eventType, AttributeValueConverter[] converters) {
        this.jfrFile = jfrFile;
        this.eventType = eventType;
        this.converters = converters;
    }

    @Override
    public Enumerator<Object[]> enumerator() {
        // The whole file is re-read and materialized eagerly on every scan,
        // because the event stream must be closed before returning
        try (var es = EventStream.openFile(jfrFile)) {
            List<Object[]> results = new ArrayList<>();
            es.onEvent(eventType.getName(), event -> {
                Object[] row = new Object[converters.length];
                for (int i = 0; i < converters.length; i++) {
                    row[i] = converters[i].getValue(event);
                }
                results.add(row);
            });
            // blocks until the end of the recording is reached
            es.start();
            return Linq4j.enumerator(results);
        }
        catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/JfrSchemaFactory.java | src/main/java/org/moditect/jfranalytics/JfrSchemaFactory.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaFactory;
import org.apache.calcite.schema.SchemaPlus;
/**
 * Calcite {@link SchemaFactory} creating a {@link JfrSchema} for the JFR
 * recording file given via the {@code file} operand.
 */
public class JfrSchemaFactory implements SchemaFactory {

    public static final String INLINE_MODEL = """
            inline: {
              version: '1.0',
              defaultSchema: 'JFR',
              schemas: [
                {
                  name: 'JFR',
                  type: 'custom',
                  factory: 'org.moditect.jfranalytics.JfrSchemaFactory',
                  operand: {
                    file: '%s'
                  }
                }
              ]
            }
            """;

    /**
     * Returns an inline Calcite model referencing the given recording file.
     */
    public static String getInlineModel(Path jfrFile) {
        // escape backslashes (Windows paths) so the path survives model parsing
        String escapedPath = jfrFile
                .toAbsolutePath()
                .toString()
                .replace("\\", "\\\\");
        return INLINE_MODEL.formatted(escapedPath);
    }

    @Override
    public Schema create(SchemaPlus parentSchema, String name, Map<String, Object> operand) {
        String file = (String) operand.get("file");
        if (file == null) {
            throw new IllegalArgumentException("Please specify a JFR file name via the 'file' operand");
        }

        Path jfrFile = new File(file).toPath().toAbsolutePath();
        if (!Files.exists(jfrFile)) {
            throw new IllegalArgumentException("Given JFR file doesn't exist: " + jfrFile);
        }

        return new JfrSchema(jfrFile);
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
moditect/jfr-analytics | https://github.com/moditect/jfr-analytics/blob/352daa673e22e62c2bf4efe42b4d01b1d3c83d01/src/main/java/org/moditect/jfranalytics/JfrSchema.java | src/main/java/org/moditect/jfranalytics/JfrSchema.java | /*
* Copyright 2021 - 2023 The original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moditect.jfranalytics;
import java.io.IOException;
import java.lang.System.Logger.Level;
import java.nio.file.Path;
import java.sql.Timestamp;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelProtoDataType;
import org.apache.calcite.schema.Function;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.Schemas;
import org.apache.calcite.schema.Table;
import org.checkerframework.checker.nullness.qual.Nullable;
import jdk.jfr.EventType;
import jdk.jfr.Timespan;
import jdk.jfr.ValueDescriptor;
import jdk.jfr.consumer.*;
import jdk.jfr.consumer.EventStream;
import jdk.jfr.consumer.RecordedClassLoader;
import jdk.jfr.consumer.RecordedStackTrace;
/**
 * A Calcite {@link Schema} that exposes each event type found in a JFR
 * recording file as a separate table, plus a few JFR-specific scalar functions
 * (CLASS_NAME, TRUNCATE_STACKTRACE, HAS_MATCHING_FRAME).
 */
public class JfrSchema implements Schema {

    private static final System.Logger LOGGER = System.getLogger(JfrSchema.class.getName());

    // Offset of the default time zone at construction time; added to epoch millis
    // in getConverter() to compensate for Calcite's local-TZ timestamp handling
    private static final int LOCAL_OFFSET = TimeZone.getDefault().getOffset(System.currentTimeMillis());

    // table (= event type) name -> table implementation; immutable after construction
    private final Map<String, JfrScannableTable> tableTypes;

    public JfrSchema(Path jfrFile) {
        this.tableTypes = Collections.unmodifiableMap(getTableTypes(jfrFile));
    }

    /**
     * Scans the recording's metadata and derives one table (row type plus one
     * value converter per column) for each event type present in the file.
     */
    private static Map<String, JfrScannableTable> getTableTypes(Path jfrFile) {
        try (var es = EventStream.openFile(jfrFile)) {
            RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl();
            Map<String, JfrScannableTable> tableTypes = new HashMap<>();

            es.onMetadata(event -> {
                for (EventType eventType : event.getEventTypes()) {
                    // metadata may be delivered repeatedly; only build each table once
                    if (!tableTypes.containsKey(eventType.getName())) {
                        RelDataTypeFactory.Builder builder = new RelDataTypeFactory.Builder(typeFactory);
                        List<AttributeValueConverter> converters = new ArrayList<>();

                        for (ValueDescriptor field : eventType.getFields()) {
                            RelDataType type = getRelDataType(eventType, field, typeFactory);
                            if (type == null) {
                                // unsupported attribute type; the column is skipped entirely
                                continue;
                            }
                            if (type.getSqlTypeName().toString().equals("ROW")) {
                                // struct types (e.g. thread) must be added as full RelDataType
                                builder.add(field.getName(), type).nullable(true);
                            }
                            else {
                                builder.add(field.getName(), type.getSqlTypeName()).nullable(true);
                            }
                            converters.add(getConverter(field, type));
                        }

                        tableTypes.put(eventType.getName(),
                                new JfrScannableTable(jfrFile, eventType, builder.build(), converters.toArray(new AttributeValueConverter[0])));
                    }
                }
            });

            es.start();
            return tableTypes;
        }
        catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Maps a JFR attribute type to a relational type, or {@code null} if the
     * attribute type is not supported (a warning is logged in that case).
     */
    private static RelDataType getRelDataType(EventType eventType, ValueDescriptor field, RelDataTypeFactory typeFactory) {
        RelDataType type;

        switch (field.getTypeName()) {
            case "boolean":
                type = typeFactory.createJavaType(boolean.class);
                break;
            case "char":
                type = typeFactory.createJavaType(char.class);
                break;
            case "byte":
                type = typeFactory.createJavaType(byte.class);
                break;
            case "short":
                type = typeFactory.createJavaType(short.class);
                break;
            case "int":
                type = typeFactory.createJavaType(int.class);
                break;
            case "long":
                // longs annotated as timestamps become SQL TIMESTAMP columns
                if ("jdk.jfr.Timestamp".equals(field.getContentType())) {
                    type = typeFactory.createJavaType(Timestamp.class);
                }
                else {
                    type = typeFactory.createJavaType(long.class);
                }
                break;
            case "float":
                type = typeFactory.createJavaType(float.class);
                break;
            case "double":
                type = typeFactory.createJavaType(double.class);
                break;
            case "java.lang.Class":
                type = typeFactory.createJavaType(RecordedClass.class);
                break;
            case "java.lang.String":
                type = typeFactory.createJavaType(String.class);
                break;
            case "java.lang.Thread":
                // threads are flattened into a struct column; see getConverter()
                List<RelDataType> types = Arrays.asList(typeFactory.createJavaType(String.class), typeFactory.createJavaType(long.class),
                        typeFactory.createJavaType(String.class), typeFactory.createJavaType(long.class), typeFactory.createJavaType(String.class));
                List<String> names = Arrays.asList("osName", "osThreadId", "javaName", "javaThreadId", "group");
                type = typeFactory.createStructType(types, names);
                break;
            case "jdk.types.ClassLoader":
                // class loaders are represented by their name only
                type = typeFactory.createJavaType(String.class);
                break;
            case "jdk.types.StackTrace":
                type = typeFactory.createJavaType(RecordedStackTrace.class);
                break;
            default:
                LOGGER.log(Level.WARNING, "Unknown type of attribute {0}::{1}: {2}", eventType.getName(), field.getName(), field.getTypeName());
                type = null;
        }

        return type;
    }

    /**
     * Returns the converter that extracts this field's column value from a
     * recorded event.
     */
    private static AttributeValueConverter getConverter(ValueDescriptor field, RelDataType type) {
        // 1. common attributes
        // timestamps are adjusted by Calcite using local TZ offset; account for that
        if (field.getName().equals("startTime")) {
            return event -> event.getStartTime().toEpochMilli() + LOCAL_OFFSET;
        }
        else if (field.getName().equals("duration")) {
            return event -> event.getDuration().toNanos();
        }
        else if (field.getName().equals("stackTrace")) {
            return event -> event.getStackTrace();
        }
        // 2. special value types
        else if (field.getTypeName().equals("java.lang.Class")) {
            return event -> event.getClass(field.getName());
        }
        else if (field.getTypeName().equals("jdk.types.ClassLoader")) {
            return event -> {
                RecordedClassLoader recordedClassLoader = (RecordedClassLoader) event.getValue(field.getName());
                if (recordedClassLoader == null) {
                    return null;
                }
                else if (recordedClassLoader.getName() != null) {
                    return recordedClassLoader.getName();
                }
                else {
                    // unnamed loader: fall back to the loader class's name
                    RecordedClass classLoaderType = recordedClassLoader.getType();
                    return classLoaderType != null ? classLoaderType.getName() : null;
                }
            };
        }
        else if (field.getTypeName().equals("java.lang.Thread")) {
            // field order must match the struct declared in getRelDataType()
            return event -> {
                RecordedThread recordedThread = (RecordedThread) event.getValue(field.getName());
                return new Object[]{
                        recordedThread.getOSName(),
                        recordedThread.getOSThreadId(),
                        recordedThread.getJavaName(),
                        recordedThread.getJavaThreadId(),
                        recordedThread.getThreadGroup() != null ? recordedThread.getThreadGroup().getName() : null,
                };
            };
        }
        // 3. further special cases
        else if (field.getAnnotation(Timespan.class) != null) {
            return event -> {
                Duration duration = event.getDuration(field.getName());
                // Long.MIN_VALUE is used as a sentinel value for absent values e.g. for jdk.GCConfiguration.pauseTarget
                // TODO: handle nanos value overflow
                return duration.getSeconds() == Long.MIN_VALUE ? Long.MIN_VALUE : duration.toNanos();
            };
        }
        // 4. default pass-through
        else {
            return event -> event.getValue(field.getName());
        }
    }

    @Override
    public @Nullable Table getTable(String name) {
        return tableTypes.get(name);
    }

    @Override
    public Set<String> getTableNames() {
        return tableTypes.keySet();
    }

    @Override
    public @Nullable RelProtoDataType getType(String name) {
        throw new UnsupportedOperationException("Not implemented yet");
    }

    @Override
    public Set<String> getTypeNames() {
        throw new UnsupportedOperationException("Not implemented yet");
    }

    @Override
    public Collection<Function> getFunctions(String name) {
        if (name.equals("CLASS_NAME")) {
            return Collections.singleton(GetClassNameFunction.INSTANCE);
        }
        else if (name.equals("TRUNCATE_STACKTRACE")) {
            return Collections.singleton(TruncateStackTraceFunction.INSTANCE);
        }
        else if (name.equals("HAS_MATCHING_FRAME")) {
            return Collections.singleton(HasMatchingFrameFunction.INSTANCE);
        }

        return Collections.emptySet();
    }

    @Override
    public Set<String> getFunctionNames() {
        return Set.of("CLASS_NAME", "TRUNCATE_STACKTRACE", "HAS_MATCHING_FRAME");
    }

    @Override
    public @Nullable Schema getSubSchema(String name) {
        return null;
    }

    @Override
    public Set<String> getSubSchemaNames() {
        return Collections.emptySet();
    }

    @Override
    public Expression getExpression(@Nullable SchemaPlus parentSchema, String name) {
        return Schemas.subSchemaExpression(parentSchema, name, getClass());
    }

    @Override
    public boolean isMutable() {
        return false;
    }

    @Override
    public Schema snapshot(SchemaVersion version) {
        return this;
    }
}
| java | Apache-2.0 | 352daa673e22e62c2bf4efe42b4d01b1d3c83d01 | 2026-01-05T02:41:10.024232Z | false |
akshetpandey/react-native-cronet | https://github.com/akshetpandey/react-native-cronet/blob/486391286a328f760ee620157f2c10859307d6fd/example/android/app/src/main/java/com/example/MainActivity.java | example/android/app/src/main/java/com/example/MainActivity.java | package com.example;
import android.os.Bundle;
import com.akshetpandey.rncronet.RNCronetNetworkingModule;
import com.facebook.react.ReactActivity;
import com.facebook.soloader.SoLoader;
import org.chromium.net.CronetEngine;
import java.io.File;
import java.net.URL;
public class MainActivity extends ReactActivity {

  /**
   * Returns the name of the main component registered from JavaScript.
   * This is used to schedule rendering of the component.
   */
  @Override
  protected String getMainComponentName() {
    return "example";
  }

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Supply a custom Cronet engine to the RNCronet module: Brotli/HTTP2/QUIC enabled,
    // 10 MiB disk cache, QUIC hints for the demo hosts, and a net log for debugging.
    RNCronetNetworkingModule.setCustomCronetBuilder(ctx -> {
      File cronetCacheDir = new File(ctx.getCacheDir(), "cronet-cache");
      cronetCacheDir.mkdirs();
      CronetEngine engine =
          new CronetEngine.Builder(ctx)
              .enableBrotli(true)
              .enableHttp2(true)
              .enableQuic(true)
              .setLibraryLoader(
                  new CronetEngine.Builder.LibraryLoader() {
                    @Override
                    public void loadLibrary(String libName) {
                      SoLoader.loadLibrary(libName);
                    }
                  })
              .setStoragePath(cronetCacheDir.getAbsolutePath())
              .enableHttpCache(CronetEngine.Builder.HTTP_CACHE_DISK, 10 * 1024 * 1024)
              .addQuicHint("www.google.com", 443, 443)
              .addQuicHint("encrypted-tbn0.gstatic.com", 443, 443)
              .enablePublicKeyPinningBypassForLocalTrustAnchors(true)
              .build();
      // Route java.net.URL connections through Cronet as well; the JDK allows
      // installing a stream handler factory at most once per process.
      URL.setURLStreamHandlerFactory(engine.createURLStreamHandlerFactory());
      engine.startNetLogToFile(ctx.getCacheDir().getPath() + "/netlog.json", false);
      return engine;
    });
  }

  @Override
  protected void onStop() {
    super.onStop();
    // stop (and thereby flush) the net log when leaving the foreground
    CronetEngine engine = RNCronetNetworkingModule.cronetEngine();
    if (engine != null) {
      engine.stopNetLog();
    }
  }
}
| java | MIT | 486391286a328f760ee620157f2c10859307d6fd | 2026-01-05T02:41:19.160773Z | false |
akshetpandey/react-native-cronet | https://github.com/akshetpandey/react-native-cronet/blob/486391286a328f760ee620157f2c10859307d6fd/example/android/app/src/main/java/com/example/MainApplication.java | example/android/app/src/main/java/com/example/MainApplication.java | package com.example;
import android.app.Application;
import android.content.Context;
import com.akshetpandey.rncronet.RNCronetFrescoImagePipelineConfig;
import com.akshetpandey.rncronet.RNCronetNetworkingPackage;
import com.facebook.imagepipeline.core.ImagePipelineConfig;
import com.facebook.react.PackageList;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.shell.MainPackageConfig;
import com.facebook.soloader.SoLoader;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
public class MainApplication extends Application implements ReactApplication {

  private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) {
    @Override
    public boolean getUseDeveloperSupport() {
      return BuildConfig.DEBUG;
    }

    @Override
    protected List<ReactPackage> getPackages() {
      // Route Fresco's image pipeline through Cronet as well
      ImagePipelineConfig pipelineConfig = RNCronetFrescoImagePipelineConfig.build(getApplicationContext());
      MainPackageConfig config = new MainPackageConfig.Builder().setFrescoConfig(pipelineConfig).build();
      List<ReactPackage> packages = new PackageList(this, config).getPackages();
      // Packages that cannot be autolinked yet can be added manually here, for example:
      packages.add(new RNCronetNetworkingPackage());
      return packages;
    }

    @Override
    protected String getJSMainModuleName() {
      return "index";
    }
  };

  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }

  @Override
  public void onCreate() {
    super.onCreate();
    SoLoader.init(this, /* native exopackage */ false);
    initializeFlipper(this, getReactNativeHost().getReactInstanceManager());
  }

  /**
   * Loads Flipper in React Native templates. Call this in the onCreate method with something like
   * initializeFlipper(this, getReactNativeHost().getReactInstanceManager());
   *
   * @param context the application context
   * @param reactInstanceManager the manager handed to the Flipper initializer
   */
  private static void initializeFlipper(
      Context context, ReactInstanceManager reactInstanceManager) {
    if (BuildConfig.DEBUG) {
      try {
        /*
         We use reflection here to pick up the class that initializes Flipper,
         since Flipper library is not available in release mode
        */
        Class<?> aClass = Class.forName("com.example.ReactNativeFlipper");
        aClass
            .getMethod("initializeFlipper", Context.class, ReactInstanceManager.class)
            .invoke(null, context, reactInstanceManager);
      } catch (ClassNotFoundException
          | NoSuchMethodException
          | IllegalAccessException
          | InvocationTargetException e) {
        // Flipper is an optional debug-only tool; log the failure and continue without it
        e.printStackTrace();
      }
    }
  }
}
| java | MIT | 486391286a328f760ee620157f2c10859307d6fd | 2026-01-05T02:41:19.160773Z | false |
akshetpandey/react-native-cronet | https://github.com/akshetpandey/react-native-cronet/blob/486391286a328f760ee620157f2c10859307d6fd/example/android/app/src/debug/java/com/example/ReactNativeFlipper.java | example/android/app/src/debug/java/com/example/ReactNativeFlipper.java | package com.example;
import android.content.Context;
import com.facebook.flipper.android.AndroidFlipperClient;
import com.facebook.flipper.android.utils.FlipperUtils;
import com.facebook.flipper.core.FlipperClient;
import com.facebook.flipper.plugins.crashreporter.CrashReporterPlugin;
import com.facebook.flipper.plugins.databases.DatabasesFlipperPlugin;
import com.facebook.flipper.plugins.fresco.FrescoFlipperPlugin;
import com.facebook.flipper.plugins.inspector.DescriptorMapping;
import com.facebook.flipper.plugins.inspector.InspectorFlipperPlugin;
import com.facebook.flipper.plugins.network.FlipperOkhttpInterceptor;
import com.facebook.flipper.plugins.network.NetworkFlipperPlugin;
import com.facebook.flipper.plugins.react.ReactFlipperPlugin;
import com.facebook.flipper.plugins.sharedpreferences.SharedPreferencesFlipperPlugin;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.modules.network.NetworkingModule;
/**
 * Wires up the Flipper debugging client with the standard set of React Native
 * plugins. Only runs when {@link FlipperUtils#shouldEnableFlipper} says so,
 * i.e. in debuggable builds.
 */
public class ReactNativeFlipper {
  public static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
    if (FlipperUtils.shouldEnableFlipper(context)) {
      final FlipperClient client = AndroidFlipperClient.getInstance(context);
      client.addPlugin(new InspectorFlipperPlugin(context, DescriptorMapping.withDefaults()));
      client.addPlugin(new ReactFlipperPlugin());
      client.addPlugin(new DatabasesFlipperPlugin(context));
      client.addPlugin(new SharedPreferencesFlipperPlugin(context));
      client.addPlugin(CrashReporterPlugin.getInstance());
      // Intercept OkHttp traffic of the RN networking module so it shows up in
      // Flipper's network inspector
      NetworkFlipperPlugin networkFlipperPlugin = new NetworkFlipperPlugin();
      NetworkingModule.setCustomClientBuilder(
          builder -> builder.addNetworkInterceptor(new FlipperOkhttpInterceptor(networkFlipperPlugin)));
      client.addPlugin(networkFlipperPlugin);
      client.start();
      // Fresco Plugin needs to ensure that ImagePipelineFactory is initialized
      // Hence we run it after all native modules have been initialized
      ReactContext reactContext = reactInstanceManager.getCurrentReactContext();
      if (reactContext == null) {
        // React context not ready yet: register a one-shot listener that adds the
        // plugin once the context exists and then deregisters itself
        reactInstanceManager.addReactInstanceEventListener(
            new ReactInstanceManager.ReactInstanceEventListener() {
              @Override
              public void onReactContextInitialized(ReactContext reactContext) {
                reactInstanceManager.removeReactInstanceEventListener(this);
                reactContext.runOnNativeModulesQueueThread(
                    () -> client.addPlugin(new FrescoFlipperPlugin()));
              }
            });
      } else {
        client.addPlugin(new FrescoFlipperPlugin());
      }
    }
  }
}
| java | MIT | 486391286a328f760ee620157f2c10859307d6fd | 2026-01-05T02:41:19.160773Z | false |
akshetpandey/react-native-cronet | https://github.com/akshetpandey/react-native-cronet/blob/486391286a328f760ee620157f2c10859307d6fd/android/src/main/java/com/akshetpandey/rncronet/RNCronetUrlRequestCallback.java | android/src/main/java/com/akshetpandey/rncronet/RNCronetUrlRequestCallback.java | package com.akshetpandey.rncronet;
import android.os.ConditionVariable;
import android.util.Log;
import androidx.annotation.Nullable;
import com.facebook.react.modules.network.OkHttpClientProvider;
import org.chromium.net.CronetException;
import org.chromium.net.UrlRequest;
import org.chromium.net.UrlResponseInfo;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.List;
import java.util.Map;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.EventListener;
import okhttp3.Headers;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;
/**
 * Bridges a Cronet {@link UrlRequest} to OkHttp: accumulates the response body,
 * translates Cronet metadata into an OkHttp {@link Response}, and delivers the
 * result either synchronously via {@link #waitForDone()} or asynchronously via
 * the optional {@link Callback}.
 *
 * <p>Thread-safety: Cronet invokes the {@code UrlRequest.Callback} methods
 * sequentially on its own network thread, while {@link #waitForDone()} blocks a
 * caller thread on a {@link ConditionVariable} until completion.
 */
class RNCronetUrlRequestCallback extends UrlRequest.Callback {
    private static final String TAG = "Callback";
    /** Maximum number of redirects to follow before aborting the request. */
    private static final int MAX_FOLLOW_COUNT = 20;
    /** Size of each body read buffer (32 KiB), matching Cronet samples. */
    private static final int READ_BUFFER_SIZE = 32 * 1024;

    private final Request originalRequest;
    private final Call mCall;
    @Nullable
    private final EventListener eventListener;
    @Nullable
    private final Callback responseCallback;
    /** Number of redirects followed so far for this request. */
    private int followCount;
    /** Built up incrementally; holds the final response once the request ends. */
    private Response mResponse;
    /** Set on failure; rethrown to the thread blocked in waitForDone(). */
    @Nullable
    private IOException mException;
    /** Opened exactly once, when the request succeeds, fails, or is canceled. */
    private final ConditionVariable mResponseCondition = new ConditionVariable();
    private final ByteArrayOutputStream mBytesReceived = new ByteArrayOutputStream();
    private final WritableByteChannel mReceiveChannel = Channels.newChannel(mBytesReceived);

    RNCronetUrlRequestCallback(Request request, Call call) {
        this(request, call, null, null);
    }

    /**
     * @param request          the originating OkHttp request
     * @param call             the OkHttp call, passed through to listener callbacks
     * @param eventListener    optional OkHttp event listener to notify of progress
     * @param responseCallback optional async callback invoked on success/failure
     */
    RNCronetUrlRequestCallback(Request request, Call call, @Nullable EventListener eventListener, @Nullable Callback responseCallback) {
        originalRequest = request;
        mCall = call;
        // Placeholder response; protocol/code/headers are filled in once Cronet
        // reports the real response via onResponseStarted().
        mResponse = new Response.Builder()
                .sentRequestAtMillis(System.currentTimeMillis())
                .request(request)
                .protocol(Protocol.HTTP_1_0)
                .code(0)
                .message("")
                .build();
        this.responseCallback = responseCallback;
        this.eventListener = eventListener;
    }

    /** Maps Cronet's negotiated-protocol string onto OkHttp's {@link Protocol} enum. */
    private static Protocol protocolFromNegotiatedProtocol(UrlResponseInfo responseInfo) {
        String negotiatedProtocol = responseInfo.getNegotiatedProtocol().toLowerCase();
        if (negotiatedProtocol.contains("quic")) {
            return Protocol.QUIC;
        } else if (negotiatedProtocol.contains("spdy")) {
            return Protocol.SPDY_3;
        } else if (negotiatedProtocol.contains("h2")) {
            return Protocol.HTTP_2;
        } else if (negotiatedProtocol.contains("1.1")) {
            return Protocol.HTTP_1_1;
        } else {
            return Protocol.HTTP_1_0;
        }
    }

    /**
     * Converts Cronet response headers to OkHttp {@link Headers}, dropping
     * content-encoding headers (Cronet already decodes the body) and skipping
     * any header/value pair OkHttp considers malformed.
     */
    private static Headers headersFromResponse(UrlResponseInfo responseInfo) {
        List<Map.Entry<String, String>> headers = responseInfo.getAllHeadersAsList();
        Headers.Builder headerBuilder = new Headers.Builder();
        for (Map.Entry<String, String> entry : headers) {
            try {
                if (entry.getKey().equalsIgnoreCase("content-encoding")) {
                    // Strip all content encoding headers as decoding is done handled by cronet
                    continue;
                }
                headerBuilder.add(entry.getKey(), entry.getValue());
            } catch (Exception e) {
                Log.w(TAG, "Invalid HTTP header/value: " + entry.getKey() + entry.getValue());
                // Ignore that header
            }
        }
        return headerBuilder.build();
    }

    /** Copies Cronet response metadata (protocol, status, headers) into the OkHttp response. */
    private static Response responseFromResponse(Response response, UrlResponseInfo responseInfo) {
        Protocol protocol = protocolFromNegotiatedProtocol(responseInfo);
        Headers headers = headersFromResponse(responseInfo);
        return response.newBuilder()
                .receivedResponseAtMillis(System.currentTimeMillis())
                .protocol(protocol)
                .code(responseInfo.getHttpStatusCode())
                .message(responseInfo.getHttpStatusText())
                .headers(headers)
                .build();
    }

    /**
     * Blocks until the request succeeds, fails, or is canceled.
     *
     * @return the completed response
     * @throws IOException if the request failed
     */
    Response waitForDone() throws IOException {
        mResponseCondition.block();
        if (mException != null) {
            throw mException;
        }
        return mResponse;
    }

    @Override
    public void onRedirectReceived(UrlRequest request, UrlResponseInfo info, String newLocationUrl) {
        if (followCount > MAX_FOLLOW_COUNT) {
            // Too many redirects; abort. The early return is required: without
            // it this method fell through and could call followRedirect() on a
            // request that was just canceled.
            request.cancel();
            return;
        }
        followCount += 1;
        OkHttpClient client = OkHttpClientProvider.createClient();
        // Mirror OkHttp's redirect policy: cross-scheme redirects are gated on
        // followSslRedirects(), same-scheme redirects on followRedirects().
        if (originalRequest.url().isHttps() && newLocationUrl.startsWith("http://") && client.followSslRedirects()) {
            request.followRedirect();
        } else if (!originalRequest.url().isHttps() && newLocationUrl.startsWith("https://") && client.followSslRedirects()) {
            request.followRedirect();
        } else if (client.followRedirects()) {
            request.followRedirect();
        } else {
            request.cancel();
        }
    }

    @Override
    public void onResponseStarted(UrlRequest request, UrlResponseInfo info) {
        mResponse = responseFromResponse(mResponse, info);
        if (eventListener != null) {
            eventListener.responseHeadersEnd(mCall, mResponse);
            eventListener.responseBodyStart(mCall);
        }
        request.read(ByteBuffer.allocateDirect(READ_BUFFER_SIZE));
    }

    @Override
    public void onReadCompleted(UrlRequest request, UrlResponseInfo info, ByteBuffer byteBuffer) throws Exception {
        byteBuffer.flip();
        try {
            mReceiveChannel.write(byteBuffer);
        } catch (IOException e) {
            Log.i(TAG, "IOException during ByteBuffer read. Details: ", e);
            throw e;
        }
        // Reuse the same buffer for the next chunk.
        byteBuffer.clear();
        request.read(byteBuffer);
    }

    @Override
    public void onSucceeded(UrlRequest request, UrlResponseInfo info) {
        if (eventListener != null) {
            eventListener.responseBodyEnd(mCall, info.getReceivedByteCount());
        }
        String contentTypeString = mResponse.header("content-type");
        MediaType contentType = MediaType.parse(contentTypeString != null ? contentTypeString : "text/plain; charset=\"utf-8\"");
        ResponseBody responseBody = ResponseBody.create(contentType, mBytesReceived.toByteArray());
        // Record the final URL (after redirects) on the request.
        Request newRequest = originalRequest.newBuilder().url(info.getUrl()).build();
        mResponse = mResponse.newBuilder().body(responseBody).request(newRequest).build();
        mResponseCondition.open();
        if (eventListener != null) {
            eventListener.callEnd(mCall);
        }
        if (responseCallback != null) {
            try {
                responseCallback.onResponse(mCall, mResponse);
            } catch (IOException e) {
                // The response has already been delivered via waitForDone();
                // don't crash Cronet's network thread over a callback failure,
                // but don't swallow it silently either.
                Log.w(TAG, "IOException raised by response callback", e);
            }
        }
    }

    @Override
    public void onFailed(UrlRequest request, UrlResponseInfo info, CronetException error) {
        IOException e = new IOException("Cronet Exception Occurred", error);
        mException = e;
        mResponseCondition.open();
        if (eventListener != null) {
            eventListener.callFailed(mCall, e);
        }
        if (responseCallback != null) {
            responseCallback.onFailure(mCall, e);
        }
    }

    @Override
    public void onCanceled(UrlRequest request, UrlResponseInfo info) {
        // Unblock waitForDone(); with no exception set it returns the current
        // (possibly placeholder) response.
        mResponseCondition.open();
        if (eventListener != null) {
            eventListener.callEnd(mCall);
        }
    }
}
| java | MIT | 486391286a328f760ee620157f2c10859307d6fd | 2026-01-05T02:41:19.160773Z | false |
akshetpandey/react-native-cronet | https://github.com/akshetpandey/react-native-cronet/blob/486391286a328f760ee620157f2c10859307d6fd/android/src/main/java/com/akshetpandey/rncronet/RNCronetInterceptor.java | android/src/main/java/com/akshetpandey/rncronet/RNCronetInterceptor.java | package com.akshetpandey.rncronet;
import org.chromium.net.UrlRequest;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.internal.annotations.EverythingIsNonNull;
@EverythingIsNonNull
class RNCronetInterceptor implements okhttp3.Interceptor {

    /**
     * Routes the call through Cronet when an engine has been configured;
     * otherwise falls back to the normal OkHttp chain.
     */
    @Override
    public Response intercept(Chain chain) throws IOException {
        Request outgoing = chain.request();
        if (RNCronetNetworkingModule.cronetEngine() == null) {
            return chain.proceed(outgoing);
        }
        return proceedWithCronet(outgoing, chain.call());
    }

    /** Executes the request on Cronet and blocks until a response is available. */
    private Response proceedWithCronet(Request request, Call call) throws IOException {
        RNCronetUrlRequestCallback cronetCallback = new RNCronetUrlRequestCallback(request, call);
        UrlRequest cronetRequest = RNCronetNetworkingModule.buildRequest(request, cronetCallback);
        cronetRequest.start();
        return cronetCallback.waitForDone();
    }
}
| java | MIT | 486391286a328f760ee620157f2c10859307d6fd | 2026-01-05T02:41:19.160773Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.